LuisMBA commited on
Commit
6c48899
·
verified ·
1 Parent(s): 0b9b8cf

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +76 -0
app.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from transformers import AutoModelForCausalLM, AutoTokenizer
3
+ import torch
4
+
5
# Model setup: load the tokenizer and weights once at import time so every
# Gradio callback reuses the same in-memory model.
model_name = "EleutherAI/gpt-neo-125M" # Lightweight model for Spaces
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu") # CPU-friendly for Spaces
9
+
10
# Text generation function
def generate_text(prompt, max_length=100, temperature=0.7, top_p=0.9):
    """Generate a sampled continuation of *prompt* with the global model.

    Args:
        prompt: Input text to continue.
        max_length: Budget of NEW tokens to sample. Passed to the model as
            ``max_new_tokens``: the original code passed ``max_length``,
            which also counts the prompt's tokens, so a long prompt could
            truncate or completely consume the continuation.
        temperature: Sampling temperature (higher = more random).
        top_p: Nucleus-sampling cutoff.

    Returns:
        The decoded text: the prompt followed by the generated continuation
        (special tokens stripped).
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only — disable autograd bookkeeping to save memory and time.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=max_length,
            temperature=temperature,
            top_p=top_p,
            do_sample=True,
            # GPT-Neo has no pad token; reuse EOS to avoid a warning.
            pad_token_id=tokenizer.eos_token_id,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
22
+
23
# Global variables to store hierarchical content between Gradio callbacks.
# global_synopsis: last output of generate_synopsis ("" until one is made).
# global_chapters: last output of generate_chapters ("" until one is made).
# NOTE(review): module-level state is shared by every visitor of the Space,
# so concurrent sessions overwrite each other's story — confirm acceptable.
global_synopsis = ""
global_chapters = ""
26
+
27
# Generation functions
def generate_synopsis(topic):
    """Generate a short story synopsis for *topic* and cache it globally.

    The cached value is consumed later by generate_chapters.
    """
    global global_synopsis
    synopsis_prompt = f"Write a synopsis for a story about {topic}: "
    result = generate_text(synopsis_prompt, max_length=50)
    global_synopsis = result
    return result
33
+
34
def generate_chapters():
    """Split the cached synopsis into three chapters and cache the result.

    Returns an instructional message when no synopsis has been generated yet.
    """
    global global_synopsis, global_chapters
    if global_synopsis:
        chapter_prompt = (
            f"Based on this synopsis: {global_synopsis}, "
            "divide the story into 3 chapters with brief descriptions for each."
        )
        global_chapters = generate_text(chapter_prompt, max_length=150)
        return global_chapters
    return "Please generate a synopsis first."
41
+
42
def expand_chapter(chapter_number):
    """Expand one previously generated chapter into a longer description.

    Args:
        chapter_number: 1-based chapter index. Comes from a ``gr.Number``
            widget, which yields ``None`` when left empty and a float
            otherwise — the original code compared it to ints directly and
            raised ``TypeError`` on an empty field.

    Returns:
        The expanded chapter text, or an instructional message when the
        chapters are missing or the index is invalid.
    """
    global global_chapters
    if not global_chapters:
        return "Please generate chapters first."
    chapters = global_chapters.split("\n")
    # Guard against an empty Number widget before any numeric comparison.
    if chapter_number is None:
        return "Please enter a chapter number."
    chapter_number = int(chapter_number)
    if chapter_number <= 0 or chapter_number > len(chapters):
        return f"Select a number between 1 and {len(chapters)}."
    prompt = f"Expand and describe Chapter {chapter_number} in more detail: {chapters[chapter_number - 1]}"
    return generate_text(prompt, max_length=200)
51
+
52
# Gradio interface: three tabs walk the user through the hierarchy
# (synopsis -> chapters -> expanded chapter).
with gr.Blocks() as demo:
    gr.Markdown("## AI Hierarchical Story Generator")

    with gr.Tab("Generate Synopsis"):
        topic_box = gr.Textbox(label="Enter the story's main topic")
        synopsis_box = gr.Textbox(label="Generated Synopsis", interactive=False)
        make_synopsis_btn = gr.Button("Generate Synopsis")

    with gr.Tab("Generate Chapters"):
        chapters_box = gr.Textbox(label="Generated Chapters", interactive=False)
        make_chapters_btn = gr.Button("Generate Chapters")

    with gr.Tab("Expand Chapter"):
        chapter_number_box = gr.Number(label="Chapter Number", precision=0)
        expanded_chapter_box = gr.Textbox(label="Expanded Chapter", interactive=False)
        expand_chapter_btn = gr.Button("Expand Chapter")

    # Wire each button to its callback.
    make_synopsis_btn.click(generate_synopsis, inputs=topic_box, outputs=synopsis_box)
    make_chapters_btn.click(generate_chapters, outputs=chapters_box)
    expand_chapter_btn.click(expand_chapter, inputs=chapter_number_box, outputs=expanded_chapter_box)

# Launch the app
demo.launch()