Spaces:
Build error
Build error
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
3 |
+
|
4 |
+
def _load_text_generation_stack(model_id: str):
    """Load the tokenizer, model, and a text-generation pipeline for *model_id*.

    Returns the (tokenizer, model, pipeline) triple so the module-level
    names below remain available to the rest of the file.
    """
    tok = AutoTokenizer.from_pretrained(model_id)
    lm = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto", device_map="auto")
    return tok, lm, pipeline("text-generation", model=lm, tokenizer=tok)


# Load DeepSeek model (produces the base code).
deepseek_tokenizer, deepseek_model, deepseek_pipe = _load_text_generation_stack(
    "deepseek-ai/DeepSeek-R1"
)

# Load LLaMA model (adds the requested features).
llama_tokenizer, llama_model, llama_pipe = _load_text_generation_stack(
    "meta-llama/Llama-4-Scout-17B-16E-Instruct"
)
13 |
+
|
14 |
+
def generate_and_enhance_code(code_request: str, features: str) -> str:
    """Generate code with DeepSeek, then have LLaMA add the requested features.

    Args:
        code_request: Natural-language description of the code to generate.
        features: Natural-language description of features LLaMA should add.

    Returns:
        The LLaMA-enhanced code as plain text.
    """
    # Step 1: Generate base code from DeepSeek.
    # return_full_text=False strips the prompt from the pipeline output, so
    # base_output is only the newly generated text rather than code_request
    # echoed back followed by the completion.
    base_output = deepseek_pipe(
        code_request,
        max_new_tokens=512,
        do_sample=True,
        temperature=0.7,
        return_full_text=False,
    )[0]["generated_text"]

    # Step 2: Ask LLaMA to add features.
    enhancement_prompt = f"Hey Llama! can you please add some more features in my code?\n\nOriginal code:\n{base_output}\n\nFeatures to add:\n{features}\n\nAdd the features and pass me the code without any extra asking!"
    # Strip the prompt here too — otherwise the UI's "Enhanced Code" box
    # would begin with the entire enhancement prompt instead of the code.
    enhanced_output = llama_pipe(
        enhancement_prompt,
        max_new_tokens=1024,
        do_sample=True,
        temperature=0.6,
        return_full_text=False,
    )[0]["generated_text"]

    return enhanced_output
23 |
+
|
24 |
+
# Gradio UI: two text inputs feed generate_and_enhance_code; the result is
# shown in a code viewer.
with gr.Blocks() as demo:
    gr.Markdown("## AI Code Assistant with DeepSeek + LLaMA 4")

    with gr.Row():
        request_box = gr.Textbox(lines=5, label="What code do you want?")
        features_box = gr.Textbox(lines=3, label="What features should LLaMA add?")

    run_button = gr.Button("Generate & Enhance Code")
    result_view = gr.Code(label="Enhanced Code")

    run_button.click(
        fn=generate_and_enhance_code,
        inputs=[request_box, features_box],
        outputs=result_view,
    )

demo.launch()