import gradio as gr
from rich.console import Console
from rich.syntax import Syntax
from transformers import AutoModelForCausalLM, AutoTokenizer
import requests
import json
# model_name = "flax-community/gpt-code-clippy-1.3B-apps-alldata"
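# The 125M APPS-fine-tuned checkpoint keeps the demo lightweight; the 1.3B
# variant above can be swapped in at the cost of memory and latency.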
model_name = "flax-community/gpt-code-clippy-125M-apps-alldata"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
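# GPT-style checkpoints ship without a dedicated padding token, so reuse EOS for padding.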
tokenizer.pad_token = tokenizer.eos_token
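# record=True lets the console re-export everything it prints as HTML later.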
console = Console(record=True)
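# Build the prompt in the APPS fine-tuning format the model was trained on.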
def format_input(question, starter_code=""):
    answer_type = (
        "\nUse Call-Based format\n" if starter_code else "\nUse Standard Input format\n"
    )
    return f"\nQUESTION:\n{question}\n{starter_code}\n{answer_type}\nANSWER:\n"
def format_outputs(text):
    # Syntax-highlight the generated code with rich and export the result as HTML.
    formatted_text = Syntax(
        text, "python", line_numbers=True, indent_guides=True, word_wrap=True
    )
    console.print(formatted_text)
    return console.export_html(inline_styles=True)
def generate_solution(question, starter_code="", temperature=1.0, num_beams=1, github_token=""):
    prompt = format_input(question, starter_code)
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    start = len(input_ids[0])
    output = model.generate(
        input_ids,
        max_length=start + 200,
        do_sample=True,
        top_p=0.95,
        pad_token_id=tokenizer.pad_token_id,
        early_stopping=True,
        temperature=temperature,
        num_beams=int(num_beams),
    )
    solution = tokenizer.decode(output[0][start:], skip_special_tokens=True).strip()
    html = format_outputs(solution)
    # If a GitHub token was provided, also publish the code as a gist and
    # append a carbon.now.sh link that renders it as an image.
    if github_token:
        carbon_url = create_carbon_link(solution, github_token)
        html += f'<p><a href="{carbon_url}" target="_blank">Open in Carbon</a></p>'
    return html
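# Pre-filled examples shown in the UI: [question, starter_code, temperature, beam_size, github_token].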
_EXAMPLES = [
    [
        """
Given a 2D list of size `m * n`, your task is to find the sum of the minimum value in each row.
For example:
```python
[
    [1, 2, 3, 4, 5],       # minimum value of row is 1
    [5, 6, 7, 8, 9],       # minimum value of row is 5
    [20, 21, 34, 56, 100]  # minimum value of row is 20
]
```
So the function should return `26`, because the sum of the minimums is `1 + 5 + 20 = 26`.
""",
        "",
        0.8,
        1,
        "",
    ],
    [
        """
# Personalized greeting
Create a function that gives a personalized greeting. This function takes two parameters: `name` and `owner`.
""",
        """
Use conditionals to return the proper message:
case | return
--- | ---
name equals owner | 'Hello boss'
otherwise | 'Hello guest'
def greet(name, owner):
""",
        0.8,
        1,
        "",
    ],
]
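# Input widgets, in the order generate_solution expects its arguments; the
# GitHub token is only needed for the optional Carbon link.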
inputs = [
    gr.inputs.Textbox(placeholder="Define a problem here...", lines=7),
    gr.inputs.Textbox(placeholder="Provide optional starter code...", lines=3),
    gr.inputs.Slider(0.5, 1.5, 0.1, default=0.8, label="Temperature"),
    gr.inputs.Slider(1, 4, 1, default=1, label="Beam size"),
    gr.inputs.Textbox(lines=1, label="Your GitHub API token (optional)"),
]
# "Carbon" support: store the generated solution in a GitHub gist and build a
# carbon.now.sh link that renders it as a shareable image.
GITHUB_API = "https://api.github.com"

def create_carbon_link(code, github_token):
    # Create a secret gist holding the generated code.
    url = GITHUB_API + "/gists"
    headers = {"Authorization": "token %s" % github_token}
    params = {"scope": "gist"}
    payload = {
        "description": "Solution generated by Code Clippy",
        "public": False,
        "files": {"solution.py": {"content": code}},
    }
    res = requests.post(url, headers=headers, params=params, data=json.dumps(payload))
    gist_id = res.json()["id"]
    # carbon.now.sh can render a gist directly from its id.
    return "https://carbon.now.sh/" + gist_id
outputs = [gr.outputs.HTML(label="Solution")]
gr.Interface(
generate_solution,
inputs=inputs,
outputs=outputs,
title="Code Clippy: Problem Solver",
examples=_EXAMPLES,
).launch(share=False)