# numinatest/app.py
import gradio as gr
from transformers import pipeline
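# Note: device_map="auto" requires the `accelerate` package, and
# AI-MO/NuminaMath-72B-CoT is a 72B-parameter model, so running this
# script as-is needs a machine with a large amount of GPU memory.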
# Load the model and tokenizer
def load_model():
    # Load the NuminaMath-72B-CoT model
    pipe = pipeline(
        "text-generation",
        model="AI-MO/NuminaMath-72B-CoT",
        torch_dtype="auto",
        device_map="auto",  # Automatically map to available GPU/CPU
    )
    return pipe
# Initialize the pipeline
model_pipeline = load_model()
# Define the function to process inputs
def solve_math_question(prompt):
    # Generate output using the model
    outputs = model_pipeline(prompt, max_new_tokens=1024, do_sample=False)
    return outputs[0]["generated_text"]
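# Note: by default the text-generation pipeline returns the prompt followed by
# the model's continuation in "generated_text"; passing return_full_text=False
# to the pipeline call would return only the newly generated answer.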
# Define the Gradio interface
with gr.Blocks() as app:
gr.Markdown("# NuminaMath-72B-CoT Math Question Solver")
gr.Markdown(
"Ask a math-related question, and the model will attempt to solve it with reasoning!"
)
with gr.Row():
question = gr.Textbox(
label="Your Math Question",
placeholder="E.g., For how many values of the constant k will the polynomial x^2 + kx + 36 have two distinct integer roots?",
)
output = gr.Textbox(label="Model Output")
submit_button = gr.Button("Solve")
submit_button.click(solve_math_question, inputs=question, outputs=output)
# Launch the app
app.launch()
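# ----------------------------------------------------------------------------
# Example client usage (a minimal sketch, not part of the original file): once
# the app is running, it can also be queried programmatically with
# gradio_client. The endpoint name "/solve_math_question" is an assumption
# based on Gradio's default of exposing click handlers under the handler's
# function name; Client.view_api() lists the endpoints actually available.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860")
#   answer = client.predict(
#       "Solve x^2 - 5x + 6 = 0.",
#       api_name="/solve_math_question",
#   )
#   print(answer)
# ----------------------------------------------------------------------------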