Samuelblue committed on
Commit ccee536 · 1 Parent(s): 7a516a1
Files changed (1)
  1. app.py +20 -29
app.py CHANGED
@@ -1,31 +1,22 @@
 
 
- import gradio
- import graphics as gr
-
- # Create a text input
- text_input = gr.inputs.Textbox(lines=2, label="Enter your text:")
-
- # Create a text output
- text_output = gr.outputs.Textbox(label="ChatGPT Output:")
-
- # Create a button
- button = gradio.inputs.Button(label="Submit")
-
- # Create a form
- form = gr.Interface(
-     text_input,
-     button,
-     text_output,
-     title="ChatGPT",
-     description="Chat with a GPT-3 model"
- )
-
- # Define the function that will be called when the button is clicked
- def process_input(values):
-     text = values["text_input"]
-     response = chatgpt.chat(text)
-     return {"text_output": response}
-
- # Launch the form
- form.launch(process_input)
+ import gradio as gr
+ import torch
+ from transformers import GPT2Tokenizer, GPT2LMHeadModel
+
+ # Initialize tokenizer and model
+ tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
+ model = GPT2LMHeadModel.from_pretrained('gpt2')
+
+ # Create a function to generate text
+ def generate_text(input_text):
+     # Encode the input text
+     input_ids = tokenizer.encode(input_text, return_tensors='pt')
+     # Generate the output text
+     output_ids = model.generate(input_ids, max_length=50, do_sample=True, top_k=50, top_p=0.95, num_return_sequences=1)
+     # Decode the output text
+     output_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
+     return output_text
+
+ # Create the interface
+ gr.Interface(fn=generate_text, inputs="text", outputs="text", title="Chat GPT").launch()
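For a quick sanity check of the added generation path without launching the Gradio UI, the same tokenize / generate / decode steps can be run directly. The snippet below is a minimal standalone sketch, not part of this commit; the prompt string is invented for illustration, and the decoding settings simply mirror those used by generate_text in the new app.py.

from transformers import GPT2Tokenizer, GPT2LMHeadModel

# Load the same checkpoint used in the new app.py
tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
model = GPT2LMHeadModel.from_pretrained('gpt2')

# Encode a sample prompt (hypothetical, for illustration only)
input_ids = tokenizer.encode("Hello, how are you today?", return_tensors='pt')

# Sample up to 50 tokens with the same settings as generate_text in the commit
output_ids = model.generate(
    input_ids,
    max_length=50,
    do_sample=True,
    top_k=50,
    top_p=0.95,
    num_return_sequences=1,
)

# Decode and print the generated continuation
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))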