# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr


# Load the fine-tuned GPT-2 checkpoint and its tokenizer from the Hugging Face Hub
model = AutoModelForCausalLM.from_pretrained("MohamedTalaat91/gpt2-wikitext2")
tokenizer = AutoTokenizer.from_pretrained("MohamedTalaat91/gpt2-tokenizer")



def generate(input_text):
    # Tokenize the prompt into model inputs (input_ids and attention_mask)
    inputs = tokenizer(input_text, return_tensors="pt")
    # Generate a continuation of the prompt
    generated_ids = model.generate(
        inputs["input_ids"],
        attention_mask=inputs["attention_mask"],
        max_length=100,          # Maximum total length (prompt + continuation); adjust as needed
        num_return_sequences=1,  # Number of sequences to generate
        do_sample=True,          # Sample instead of greedy decoding
        top_k=50,                # Top-k sampling to introduce diversity
        temperature=0.7,         # Lower values make sampling less random
        pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no dedicated pad token
    )
    # Decode the generated token ids back into text (the prompt is included)
    generated_text = tokenizer.decode(generated_ids[0], skip_special_tokens=True)

    return generated_text
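
# Example (illustrative, not from the original script): generate("The history of
# machine learning") returns a sampled continuation of up to 100 tokens total,
# with the prompt text included in the decoded output.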



with gr.Blocks() as demo:
    gr.Markdown("# GPT-2 WikiText2")
    with gr.Row():
        with gr.Column():
            input_text = gr.Textbox(label="Input Text")
            generate_button = gr.Button("Generate")
            output_text = gr.Textbox(label="Generated Text")

    # Wire the button to the generation function
    generate_button.click(fn=generate, inputs=input_text, outputs=output_text)

demo.launch(share=True)
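
# To run locally (assumes transformers, torch, and gradio are installed):
#   pip install torch transformers gradio
#   python app.py        # or whatever this file is named
# share=True additionally requests a temporary public Gradio link; omit it to
# serve only on the local URL printed by launch().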