Update app.py
app.py CHANGED

@@ -137,7 +137,7 @@ if __name__ == "__main__":
     name: Literal["flux-dev", "flux-dev-fp8", "flux-schnell"] = "flux-dev"
     device: Literal["cuda", "cpu"] = "cuda" if torch.cuda.is_available() else "cpu"
     offload: bool = dataclasses.field(
-        default=
+        default=True,
         metadata={"help": "If True, sequantial offload the models(ae, dit, text encoder) to CPU if not used."}
     )
     port: int = 7860
@@ -147,4 +147,5 @@ if __name__ == "__main__":
     args = args_tuple[0]

     demo = create_demo(args.name, args.device, args.offload)
-    demo.launch(server_port=args.port, ssr_mode=False)
+    #demo.launch(server_port=args.port, ssr_mode=False)
+    demo.launch(server_port=args.port, share = True)