MCES10 committed on
Commit d48b95b · verified · 1 Parent(s): 8b63408

Update index.html

Files changed (1)
  1. index.html +58 -20
index.html CHANGED
@@ -21,36 +21,74 @@
  <gradio-lite>
  <gradio-file name="app.py" entrypoint>
  import gradio as gr

- from filters import as_gray

- def process(input_image):
-     output_image = as_gray(input_image)
-     return output_image

- demo = gr.Interface(
-     process,
-     "image",
-     "image",
-     examples=["lion.jpg", "logo.png"],
- )

- demo.launch()
- </gradio-file>

- <gradio-file name="filters.py">
- from skimage.color import rgb2gray

- def as_gray(image):
-     return rgb2gray(image)
  </gradio-file>

- <gradio-file name="lion.jpg" url="https://raw.githubusercontent.com/gradio-app/gradio/main/gradio/test_data/lion.jpg" />
- <gradio-file name="logo.png" url="https://raw.githubusercontent.com/gradio-app/gradio/main/guides/assets/logo.png" />

  <gradio-requirements>
- # Same syntax as requirements.txt
- scikit-image
  </gradio-requirements>
  </gradio-lite>
  </body>
 
  <gradio-lite>
  <gradio-file name="app.py" entrypoint>
  import gradio as gr
+ from huggingface_hub import InferenceClient

+ """
+ For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+ """
+ client = InferenceClient("MCES10-Software/Ricky-Llama-3.2")


+ def respond(
+     message,
+     history: list[tuple[str, str]],
+     system_message,
+     max_tokens,
+     temperature,
+     top_p,
+ ):
+     messages = [{"role": "system", "content": system_message}]

+     for val in history:
+         if val[0]:
+             messages.append({"role": "user", "content": val[0]})
+         if val[1]:
+             messages.append({"role": "assistant", "content": val[1]})
+
+     messages.append({"role": "user", "content": message})
+
+     response = ""
+
+     for message in client.chat_completion(
+         messages,
+         max_tokens=max_tokens,
+         stream=True,
+         temperature=temperature,
+         top_p=top_p,
+     ):
+         token = message.choices[0].delta.content
+
+         response += token
+         yield response
+
+
+ """
+ For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
+ """
+ demo = gr.ChatInterface(
+     respond,
+     additional_inputs=[
+         gr.Textbox(value="You are a friendly Chatbot named RICKY (REALLY INTELLIGENT COMPUTING KEEPS YOU) LLAMA 3.2 1B INSTRUCT FINE TUNED BY MCES10 Software.", label="System message"),
+         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+         gr.Slider(
+             minimum=0.1,
+             maximum=1.0,
+             value=0.95,
+             step=0.05,
+             label="Top-p (nucleus sampling)",
+         ),
+     ],
+ )


+ if __name__ == "__main__":
+     demo.launch()
  </gradio-file>


  <gradio-requirements>
+ huggingface_hub==0.25.2
  </gradio-requirements>
  </gradio-lite>
  </body>
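
The new app.py streams the model's reply by iterating over client.chat_completion(..., stream=True) and yielding the accumulated text from the respond generator; gr.ChatInterface renders each yielded value as the current response, which produces the token-by-token streaming effect. A minimal sketch of driving that generator outside of Gradio Lite, assuming respond has been copied into a local Python module (here called app.py) and the Inference API endpoint for the model is reachable; the prompt and sampling settings below are illustrative only:

# Sketch only: exercise the streaming generator directly.
# Assumes `respond` from the app.py above is importable as a local module;
# this file is not part of the commit.
from app import respond

for partial in respond(
    message="Hello, who are you?",
    history=[],                       # no previous chat turns
    system_message="You are a friendly Chatbot.",
    max_tokens=128,
    temperature=0.7,
    top_p=0.95,
):
    print(partial)                    # each yield is the reply accumulated so far

Each iteration prints a longer prefix of the same reply, mirroring what the chat UI displays while the stream is in flight.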