import os
import gradio as gr
import requests
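
# Example prompts shown in the demo UI: summarization, factual QA, translation,
# entailment, and chain-of-thought reasoning questions.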
examples = [
    ["Please answer to the following question. Who is going to be the next Ballon d'or?"],
    ["Q: Can Barack Obama have a conversation with George Washington? Give the rationale before answering."],
    ["Summarize the following text: Peter and Elizabeth took a taxi to attend the night party in the city. While in the party, Elizabeth collapsed and was rushed to the hospital. Since she was diagnosed with a brain injury, the doctor told Peter to stay besides her until she gets well. Therefore, Peter stayed with her at the hospital for 3 days without leaving."],
    ["Please answer the following question: What is the boiling point of water?"],
    ["Answer the following question by detailing your reasoning: Are Pokemons alive?"],
    ["Translate to German: How old are you?"],
    ["Generate a cooking recipe to make bolognese pasta:"],
    ["Answer the following yes/no question by reasoning step-by-step. Can you write a whole Haiku in a single tweet?"],
    ["Premise: At my age you will probably have learnt one lesson. Hypothesis: It's not certain how many lessons you'll learn by your thirties. Does the premise entail the hypothesis?"],
    ["Answer the following question by reasoning step by step. The cafeteria had 23 apples. If they used 20 for lunch and bought 6 more, how many apples do they have?"],
    ["""Q: Roger has 5 tennis balls. He buys 2 more cans of tennis balls. Each can has 3 tennis balls. How many tennis balls does he have now?
A: Roger started with 5 balls. 2 cans of 3 tennis balls each is 6 tennis balls. 5 + 6 = 11. The answer is 11.
Q: A juggler can juggle 16 balls. Half of the balls are golf balls, and half of the golf balls are blue. How many blue golf balls are there?"""]
]
title = "Upword - Models Competition"
description = "This demo compares [BART-Large-CNN](https://huggingface.co/facebook/bart-large-cnn) and [Flan-T5-XXL](https://huggingface.co/google/flan-t5-xxl)."
token = os.environ["token"]
urls = {
    'flan-t5': os.environ["url"],
    # 'flan-t5': "https://api-inference.huggingface.co/models/philschmid/flan-t5-xxl-sharded-fp16",
    'bart-large-cnn': "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
}


def inference(text):
    """Send the same prompt to both model endpoints and return their outputs."""
    headers = {"Authorization": f"Bearer {token}"}
    payload = {
        "inputs": text,
        "parameters": {
            "min_length": 30,
            "max_length": 120,
            "do_sample": False
        }
    }
    # Query each endpoint with an identical payload.
    responses = dict()
    for model, url in urls.items():
        responses[model] = requests.post(url, headers=headers, json=payload)
    # Flan-T5 returns "generated_text"; the BART summarization model returns "summary_text".
    output_flan = responses['flan-t5'].json()[0]['generated_text']
    output_vanilla = responses['bart-large-cnn'].json()[0]['summary_text']
    return [output_flan, output_vanilla]
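
# Side-by-side comparison UI: one prompt box in, two labeled output boxes
# (Flan-T5-XXL and BART-Large-CNN) out.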
io = gr.Interface(
    inference,
    gr.Textbox(lines=3),
    outputs=[
        gr.Textbox(lines=3, label="Flan T5-XXL"),
        gr.Textbox(lines=3, label="BART-Large-CNN")
    ],
    title=title,
    description=description,
    examples=examples
)
io.launch()