Surn committed
Commit eef4dbe · 1 Parent(s): 21c8b01

Update Demo

Files changed (2):
  1. app.py +8 -2
  2. modules/version_info.py +123 -0
app.py CHANGED
@@ -6,6 +6,8 @@ from pathlib import Path
 
 import gradio as gr
 import src.gradio_user_history as gr_user_history
+from modules.version_info import versions_html
+
 from gradio_client import Client
 #from gradio_space_ci import enable_space_ci
 
@@ -64,19 +66,23 @@ with gr.Blocks(css="style.css") as demo:
     )
     prompt.submit(fn=generate, inputs=[prompt,negprompt], outputs=gallery)
 
-with gr.Blocks() as demo_with_history:
+with gr.Blocks(theme='Surn/beeuty@==0.5.24') as demo_with_history:
     with gr.Tab("README"):
         gr.Markdown(Path("README.md").read_text(encoding="utf-8").split("---")[-1])
     with gr.Tab("Demo"):
         demo.render()
     with gr.Tab("Past generations"):
+        gr_user_history.setup(display_type="image_path") # optional, this is where you would set the display type = "video_path" if you want to display videos
         gr_user_history.render()
+    with gr.Row("Versions") as versions_row:
+        gr.HTML(value=versions_html(), visible=True, elem_id="versions")
+
 
 if __name__ == "__main__":
     launch_args = {}
     launch_kwargs = {}
     launch_kwargs['allowed_paths'] = ["assets/", "data/_user_history", "/data/_user_history/Surn"]
-    launch_kwargs['favicon_path'] = "./assets/favicon.ico"
+    launch_kwargs['favicon_path'] = "assets/favicon.ico"
     #launch_kwargs['inbrowser'] = True
 
     demo_with_history.queue().launch(**launch_kwargs)
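
The wiring above can be smoke-tested on its own with a minimal sketch along these lines, assuming the repository root is the working directory so that modules/version_info.py and its dependencies (gradio, torch, src.gradio_user_history) resolve; the file name preview_footer.py is illustrative and not part of this commit:

# preview_footer.py - illustrative sketch, not part of this commit
import gradio as gr
from modules.version_info import versions_html

with gr.Blocks() as footer_demo:
    # Render the same HTML footer that app.py now shows in its "Versions" row
    with gr.Row():
        gr.HTML(value=versions_html(), elem_id="versions")

if __name__ == "__main__":
    footer_demo.launch(favicon_path="assets/favicon.ico")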
modules/version_info.py ADDED
@@ -0,0 +1,123 @@
+# version_info.py
+
+from src.gradio_user_history import __version__ as user_history_version
+import subprocess
+import os
+import sys
+import gc
+import gradio as gr
+
+git = os.environ.get('GIT', "git")
+
+def commit_hash():
+    try:
+        return subprocess.check_output([git, "rev-parse", "HEAD"], shell=False, encoding='utf8').strip()
+    except Exception:
+        return "<none>"
+
+def get_xformers_version():
+    try:
+        import xformers
+        return xformers.__version__
+    except Exception:
+        return "<none>"
+def get_transformers_version():
+    try:
+        import transformers
+        return transformers.__version__
+    except Exception:
+        return "<none>"
+
+def get_accelerate_version():
+    try:
+        import accelerate
+        return accelerate.__version__
+    except Exception:
+        return "<none>"
+def get_safetensors_version():
+    try:
+        import safetensors
+        return safetensors.__version__
+    except Exception:
+        return "<none>"
+def get_diffusers_version():
+    try:
+        import diffusers
+        return diffusers.__version__
+    except Exception:
+        return "<none>"
+
+def get_torch_info():
+    from torch import __version__ as torch_version_, version, cuda, backends
+    device_type = initialize_cuda()
+    if device_type == "cuda":
+        try:
+            info = [torch_version_, f"CUDA Version:{version.cuda}", f"Available:{cuda.is_available()}", f"flash attention enabled: {backends.cuda.flash_sdp_enabled()}", f"Capabilities: {cuda.get_device_capability(0)}", f"Device Name: {cuda.get_device_name(0)}", f"Device Count: {cuda.device_count()}"]
+            del torch_version_, version, cuda, backends
+            return info
+        except Exception:
+            del torch_version_, version, cuda, backends
+            return "<none>"
+    else:
+        return "Not Recognized"
+
+def release_torch_resources():
+    from torch import cuda
+    # Clear the CUDA cache
+    cuda.empty_cache()
+    cuda.ipc_collect()
+    # Delete any objects that are using GPU memory
+    #for obj in gc.get_objects():
+    #    if is_tensor(obj) or (hasattr(obj, 'data') and is_tensor(obj.data)):
+    #        del obj
+    # Run garbage collection
+    del cuda
+    gc.collect()
+
+
+def initialize_cuda():
+    from torch import cuda, version
+    if cuda.is_available():
+        device = cuda.device("cuda")
+        print(f"CUDA is available. Using device: {cuda.get_device_name(0)} with CUDA version: {version.cuda}")
+        result = "cuda"
+    else:
+        print("CUDA is not available. Using CPU.")
+        result = "cpu"
+    return result
+
+def versions_html():
+    from torch import __version__ as torch_version_
+    python_version = ".".join([str(x) for x in sys.version_info[0:3]])
+    commit = commit_hash()
+
+    # Define the Toggle Dark Mode link with JavaScript
+    toggle_dark_link = '''
+    <a href="#" onclick="document.body.classList.toggle('dark'); return false;" style="cursor: pointer; text-decoration: underline;">
+        Toggle Dark Mode
+    </a>
+    '''
+
+    v_html = f"""
+    version: <a href="https://huggingface.co/spaces/Surn/gradio-user-history/commit/{"huggingface" if commit == "<none>" else commit}" target="_blank">{"huggingface" if commit == "<none>" else commit}</a>
+    &#x2000;•&#x2000;
+    User History: {user_history_version}
+    &#x2000;•&#x2000;
+    python: <span title="{sys.version}">{python_version}</span>
+    &#x2000;•&#x2000;
+    torch: {torch_version_}
+    &#x2000;•&#x2000;
+    xformers: {get_xformers_version()}
+    &#x2000;•&#x2000;
+    transformers: {get_transformers_version()}
+    &#x2000;•&#x2000;
+    safetensors: {get_safetensors_version()}
+    &#x2000;•&#x2000;
+    gradio: {gr.__version__}
+    &#x2000;•&#x2000;
+    {toggle_dark_link}
+    <br>
+    Full GPU Info:{get_torch_info()}
+    """
+    del torch_version_
+    return v_html
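
As a rough usage sketch (again assuming the Space's environment, where gradio, torch and src.gradio_user_history are importable from the repository root), the new helper can be exercised directly and its output inspected in a browser; the output file name versions_preview.html is illustrative:

# Illustrative check, not part of this commit: dump the footer markup to a file.
from modules.version_info import versions_html

html = versions_html()  # builds the version/environment footer as an HTML string
print(html[:200])       # quick peek at the start of the markup
with open("versions_preview.html", "w", encoding="utf-8") as f:
    f.write(html)       # open this file in a browser to preview the footer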