import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Model setup
# model_name = "EleutherAI/gpt-neo-125M"  # Lightweight model, fastest option for CPU Spaces
model_name = "EleutherAI/gpt-neo-1.3B"  # Larger model: better output quality, but slower on CPU
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")  # CPU-friendly for Spaces
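# Note: the 1.3B checkpoint loads in float32 by default (roughly 5 GB of RAM);
# if memory is tight, switching back to the 125M checkpoint above is the simplest fix.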
# Text generation helper shared by the three generation steps below
def generate_text(prompt, max_new_tokens=100, temperature=0.7, top_p=0.9):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        **inputs,
        max_new_tokens=max_new_tokens,  # counts only newly generated tokens, so long prompts are not cut short
        temperature=temperature,
        top_p=top_p,
        do_sample=True,  # sampling must be enabled for temperature/top_p to take effect
        pad_token_id=tokenizer.eos_token_id,  # GPT-Neo has no pad token; reuse EOS
    )
    # Return only the continuation, not the echoed prompt
    generated = outputs[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(generated, skip_special_tokens=True)
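# Quick local sanity check (not used by the app itself):
# print(generate_text("Once upon a time in a small coastal town,", max_new_tokens=40))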
# Global variables to store hierarchical content
global_synopsis = ""
global_chapters = ""
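# Note: module-level globals are shared by every visitor of the Space in the same
# process; gr.State would be the usual choice for per-user session state.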
# Generation functions
def generate_synopsis(topic):
    global global_synopsis
    prompt = f"Write a brief synopsis for a story about {topic}: "
    global_synopsis = generate_text(prompt, max_new_tokens=100)
    return global_synopsis
def generate_chapters():
    global global_synopsis, global_chapters
    if not global_synopsis:
        return "Please generate a synopsis first."
    prompt = f'''Based on this synopsis for a book: {global_synopsis}
Divide the story into 4 chapters, one per line, numbered "Chapter 1" to "Chapter 4", each followed by a brief description.
Make the first chapter read like an introduction and the last like an epilogue.'''
    global_chapters = generate_text(prompt, max_new_tokens=400)  # enough for four short descriptions while staying CPU-friendly
    return global_chapters
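# expand_chapter below splits global_chapters on newlines, which is why the prompt
# above asks for one chapter per line.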
def expand_chapter(chapter_number):
    global global_chapters
    if not global_chapters:
        return "Please generate chapters first."
    if chapter_number is None:
        return "Please enter a chapter number."
    # Drop blank lines so the number the user types matches the visible list
    chapters = [line for line in global_chapters.split("\n") if line.strip()]
    chapter_number = int(chapter_number)
    if chapter_number <= 0 or chapter_number > len(chapters):
        return f"Select a number between 1 and {len(chapters)}."
    prompt = f'''Given this synopsis for a book: {global_synopsis}
Expand Chapter {chapter_number} in more detail. Its current title and brief description are: {chapters[chapter_number - 1]}'''
    return generate_text(prompt, max_new_tokens=200)
# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("## AI Hierarchical Story Generator")
    with gr.Tab("Generate Synopsis"):
        topic_input = gr.Textbox(label="Enter the story's main topic")
        synopsis_output = gr.Textbox(label="Generated Synopsis", interactive=False)
        synopsis_button = gr.Button("Generate Synopsis")
    with gr.Tab("Generate Chapters"):
        chapters_output = gr.Textbox(label="Generated Chapters", interactive=False)
        chapters_button = gr.Button("Generate Chapters")
    with gr.Tab("Expand Chapter"):
        chapter_input = gr.Number(label="Chapter Number", precision=0)
        chapter_detail_output = gr.Textbox(label="Expanded Chapter", interactive=False)
        chapter_button = gr.Button("Expand Chapter")
    # Connect functions to UI
    synopsis_button.click(generate_synopsis, inputs=topic_input, outputs=synopsis_output)
    chapters_button.click(generate_chapters, outputs=chapters_output)
    chapter_button.click(expand_chapter, inputs=chapter_input, outputs=chapter_detail_output)
# Launch the app
demo.launch()
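# When running locally, demo.launch(share=True) gives a temporary public link;
# on Hugging Face Spaces the plain launch() above is all that's needed.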