Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -1,6 +1,8 @@
 import os
 import subprocess
 import sys
+import importlib.util
+import time
 
 # Set required environment variables
 os.environ["TRANSFORMERS_NO_ADVISORY_WARNINGS"] = "1"
@@ -10,37 +12,116 @@ os.environ["TRANSFORMERS_COMPILER_DISABLED"] = "1"
 def install_required_packages():
     required_packages = [
         "warmup_scheduler",
-        "
+        "torchtools"
     ]
 
+    github_repos = {
+        "warmup_scheduler": "git+https://github.com/ildoonet/pytorch-gradual-warmup-lr.git",
+        "torchtools": "git+https://github.com/pabloppp/pytorch-tools.git"
+    }
+
+    missing_packages = []
+
+    # First check which packages need to be installed
     for package in required_packages:
+        if importlib.util.find_spec(package) is None:
+            missing_packages.append(package)
+            print(f"{package} needs to be installed")
+
+    # Install missing packages
+    for package in missing_packages:
+        print(f"Installing {package}...")
         try:
-
-
-
-
-
-            subprocess.check_call([
-
-
-
-
-
-
-
-
-
+            if package in github_repos:
+                subprocess.check_call([
+                    sys.executable, "-m", "pip", "install", github_repos[package]
+                ])
+            else:
+                subprocess.check_call([
+                    sys.executable, "-m", "pip", "install", package
+                ])
+            print(f"{package} installed successfully")
+            # Wait a moment to ensure the package is available for import
+            time.sleep(1)
+        except subprocess.CalledProcessError as e:
+            print(f"Failed to install {package}: {e}")
+
+    # If there were any packages installed, try to force a refresh of sys.modules
+    if missing_packages:
+        print("Refreshing Python module cache...")
+        for package in missing_packages:
+            if package in sys.modules:
+                del sys.modules[package]
+
+# Create patches for missing modules if they can't be installed
+def create_module_patches():
+    # Create a patch for torchtools.transforms if it doesn't exist
+    if importlib.util.find_spec("torchtools") is None or importlib.util.find_spec("torchtools.transforms") is None:
+        print("Creating patch for torchtools.transforms...")
+
+        # Create the directory structure
+        os.makedirs("torchtools/transforms", exist_ok=True)
+
+        # Create __init__.py files
+        with open("torchtools/__init__.py", "w") as f:
+            f.write("# Patch for torchtools\n")
+
+        # Create a simplified SmartCrop class
+        with open("torchtools/transforms/__init__.py", "w") as f:
+            f.write("""# Patch for torchtools.transforms
+import torch
+import torch.nn.functional as F
+
+class SmartCrop:
+    def __init__(self, size=None, scale=None, preserve_aspect_ratio=True):
+        self.size = size
+        self.scale = scale
+        self.preserve_aspect_ratio = preserve_aspect_ratio
+
+    def __call__(self, image):
+        # Basic placeholder implementation that resizes the image
+        # For actual smart cropping, a more complex implementation would be needed
+        if self.size is not None:
+            return F.interpolate(image.unsqueeze(0), size=self.size, mode='bilinear', align_corners=False).squeeze(0)
+        elif self.scale is not None:
+            h, w = image.shape[-2:]
+            new_h, new_w = int(h * self.scale), int(w * self.scale)
+            return F.interpolate(image.unsqueeze(0), size=(new_h, new_w), mode='bilinear', align_corners=False).squeeze(0)
+        return image
+""")
+
+        # Add the patch directory to the system path
+        sys.path.insert(0, os.path.abspath('./'))
+        print("Torchtools patch created successfully")
+
+# Install required packages
+print("Checking and installing required packages...")
 install_required_packages()
 
-#
+# Create patch modules for any missing dependencies
+print("Creating patches for any missing modules...")
+create_module_patches()
+
+# Give a moment for the system to register newly installed packages
+time.sleep(2)
+
+# Now continue with the imports
+print("Importing the required modules...")
 import yaml
 import torch
 sys.path.append(os.path.abspath('./'))
-
-
-
-from
+
+# Try importing the modules
+try:
+    from inference.utils import *
+    from train import WurstCoreB
+    from gdf import DDPMSampler
+    from train import WurstCore_t2i as WurstCoreC
+    print("Successfully imported all required modules!")
+except ImportError as e:
+    print(f"Warning: Import error: {e}")
+    print("Continuing with the application setup...")
+
 import numpy as np
 import random
 import argparse
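
For reference, a minimal sketch (not part of the commit) of how the placeholder SmartCrop written by create_module_patches() behaves once the patch directory is on sys.path: it simply resizes a CHW tensor with bilinear interpolation rather than doing any content-aware cropping. The tensor shapes are illustrative.

    # Hedged sketch: assumes create_module_patches() has written the local
    # torchtools/ placeholder and that no real torchtools install shadows it.
    import torch
    from torchtools.transforms import SmartCrop  # resolved from the patched package

    image = torch.rand(3, 512, 768)      # C x H x W input tensor (illustrative)
    crop = SmartCrop(size=(256, 256))    # placeholder ignores preserve_aspect_ratio
    out = crop(image)                    # bilinear resize, not a true smart crop
    print(out.shape)                     # torch.Size([3, 256, 256])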