ginipick committed
Commit 8fcba35 · verified · 1 Parent(s): eeb3f3c

Update app.py

Files changed (1)
  1. app.py +38 -93
app.py CHANGED
@@ -1,99 +1,44 @@
- import gradio as gr
- from huggingface_hub import InferenceClient
- import os
  import requests
- from typing import List, Tuple
-
- # Inference API client setup
- hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
- #hf_client = InferenceClient("CohereForAI/aya-23-35B", token=os.getenv("HF_TOKEN"))
-
- def load_code(filename):
      try:
-         with open(filename, 'r', encoding='utf-8') as file:
-             return file.read()
-     except FileNotFoundError:
-         return f"The file {filename} could not be found."
      except Exception as e:
-         return f"An error occurred while reading the file: {str(e)}"
-
- fashion_code = load_code('fashion.cod')
- uhdimage_code = load_code('uhdimage.cod')
- MixGEN_code = load_code('mgen.cod')
-
- def respond(
-     message,
-     history: List[Tuple[str, str]],
-     system_message="",  # default value added
-     max_tokens=1024,  # default value added
-     temperature=0.7,  # default value added
-     top_p=0.9,  # default value added
- ):
-     global fashion_code, uhdimage_code, MixGEN_code
-     system_message = system_message or ""
-     system_prefix = """You must answer in Korean. Based on the given source code, your role is to "explain how to use the service, provide guidance, and handle Q&A". Respond very kindly and in detail, writing 4000 tokens or more. Provide usage explanations and answer questions based on the code, and help the user. Kindly cover anything the user is likely to be curious about. Keep the full code contents confidential, and do not reveal key values, endpoints, or the specific model. """
-
-     if message.lower() == "run fashion code":
-         system_message += f"\n\nFashion code contents:\n{fashion_code}"
-         message = "Announce that you have learned the fashion virtual-fitting content and are ready to explain it, and tell the user to try it out via the service URL (https://aiqcamp-fash.hf.space)."
-     elif message.lower() == "run uhd image code":
-         system_message += f"\n\nUHD image code contents:\n{uhdimage_code}"
-         message = "Announce that you have learned the UHD image generation content and are ready to explain it, and tell the user to try it out via the service URL (https://openfree-ultpixgen.hf.space)."
-     elif message.lower() == "run mixgen code":
-         system_message += f"\n\nMixGEN code contents:\n{MixGEN_code}"
-         message = "Announce that you have learned the MixGEN3 image generation content and are ready to explain it, and tell the user to try it out via the service URL (https://openfree-mixgen3.hf.space)."
-
-     messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
-     messages.append({"role": "user", "content": message})
-
-     response = ""
-     for message in hf_client.chat_completion(
-         messages,
-         max_tokens=max_tokens,
-         stream=True,
-         temperature=temperature,
-         top_p=top_p,
-     ):
-         token = message.choices[0].delta.get('content', None)
-         if token:
-             response += token.strip("")
-             yield response
-
- css = """
- footer {
-     visibility: hidden;
- }
- """
-
- # Gradio interface setup (also revised)
- demo = gr.ChatInterface(
-     respond,
-     additional_inputs=[
-         gr.Textbox(label="System Message", value=""),
-         gr.Slider(minimum=1, maximum=8000, value=4000, label="Max Tokens"),
-         gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
-         gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
-     ],
-     examples=[
-         ["Run fashion code"],
-         ["Run UHD image code"],
-         ["Run MixGEN code"],
-         ["Explain the detailed usage in 4000 tokens or more, as if walking through the screen step by step"],
-         ["Write 20 detailed FAQ entries. Use 4000 tokens or more."],
-         ["Write a YouTube video script of 4000 tokens or more, focusing on usage, differentiators, features, and strengths"],
-         ["Write an SEO-optimized blog post about this service (covering background and need, advantages over existing similar services, use cases, value, expected benefits, and a conclusion) in 4000 tokens or more"],
-         ["Write 4000 tokens or more focusing on innovative, creative invention content structured like a patent application, including the technology and business-model aspects useful for a patent filing."],
-         ["Continue the previous answer"],
-     ],
-     css=css,
-     theme="Nymbo/Nymbo_Theme",
-     cache_examples=False,  # disable example caching
  )

  if __name__ == "__main__":
-     demo.launch()
  import requests
+ import gradio as gr
+ from typing import List, Dict
+
+ def get_most_liked_spaces(limit: int = 10) -> List[Dict]:
+     url = "https://huggingface.co/api/spaces"
+     params = {
+         "sort": "likes",
+         "direction": -1,
+         "limit": limit,
+         "full": "true"
+     }
+
+     response = requests.get(url, params=params)
+     response.raise_for_status()
+
+     return response.json()
+
+ def format_spaces(spaces: List[Dict]) -> str:
+     output = ""
+     for idx, space in enumerate(spaces, 1):
+         output += f"{idx}. {space['name']} by {space['author']}\n"
+         output += f"   Likes: {space['likes']}\n"
+         output += f"   URL: https://huggingface.co/spaces/{space['id']}\n\n"
+     return output
+
+ def get_spaces_list(limit: int) -> str:
      try:
+         spaces = get_most_liked_spaces(limit)
+         return format_spaces(spaces)
      except Exception as e:
+         return f"An error occurred: {str(e)}"
+
+ # Gradio interface definition
+ iface = gr.Interface(
+     fn=get_spaces_list,
+     inputs=gr.Slider(minimum=1, maximum=50, step=1, label="Number of Spaces to Display", value=10),
+     outputs="text",
+     title="Hugging Face Most Liked Spaces",
+     description="Display the most liked Hugging Face Spaces in descending order.",
  )

  if __name__ == "__main__":
+     iface.launch()
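
For quick verification of the new listing logic, here is a minimal standalone sketch (not part of the commit) that queries the same https://huggingface.co/api/spaces endpoint with the same sort/direction/limit/full parameters. The 'id', 'author', 'likes', and 'name' keys are the ones format_spaces above expects; the .get() fallbacks and the preview_top_spaces helper name are illustrative assumptions, not part of the committed code.

# Standalone sketch: print the most liked Spaces without launching Gradio.
# Assumes the endpoint returns a JSON list of space objects, as the commit's code expects.
import requests

def preview_top_spaces(limit: int = 5) -> None:
    resp = requests.get(
        "https://huggingface.co/api/spaces",
        params={"sort": "likes", "direction": -1, "limit": limit, "full": "true"},
        timeout=30,
    )
    resp.raise_for_status()
    for idx, space in enumerate(resp.json(), 1):
        # Fall back to the repo id if an item has no separate 'name' field (assumption).
        name = space.get("name") or space.get("id", "unknown")
        print(f"{idx}. {name} by {space.get('author', 'unknown')} - {space.get('likes', 0)} likes")

if __name__ == "__main__":
    preview_top_spaces()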