File size: 5,156 Bytes
aa6f369
 
 
8531a26
aa6f369
 
 
8531a26
aa6f369
 
 
8531a26
 
aa6f369
8531a26
f5a64b7
aa6f369
 
 
 
 
 
 
 
8531a26
aa6f369
 
 
8531a26
 
 
 
 
 
aa6f369
 
8531a26
 
aa6f369
 
8531a26
aa6f369
 
8531a26
 
 
aa6f369
 
 
f5a64b7
aa6f369
 
 
 
 
 
 
8531a26
 
 
f5a64b7
8531a26
aa6f369
 
8531a26
f5a64b7
8531a26
 
 
 
 
 
 
f5a64b7
8531a26
aa6f369
 
8531a26
aa6f369
8531a26
 
 
aa6f369
8531a26
 
 
 
 
 
aa6f369
 
 
 
 
 
8531a26
aa6f369
 
 
8531a26
 
 
 
aa6f369
8531a26
aa6f369
 
8531a26
 
 
 
 
 
aa6f369
8531a26
aa6f369
 
8531a26
f5a64b7
 
8531a26
 
aa6f369
8531a26
 
aa6f369
8531a26
 
aa6f369
 
 
8531a26
 
6541c57
f5a64b7
aa6f369
8531a26
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import gradio as gr
from huggingface_hub import InferenceClient
import os
import re

# --- Configuration ---
# HF_TOKEN is optional: when unset the client runs anonymously against the
# Inference API (rate-limited, but functional).
API_TOKEN = os.getenv("HF_TOKEN", None)
MODEL = "Qwen/Qwen2.5-Coder-32B-Instruct"

# --- Initialize Inference Client ---
try:
    print(f"Initializing Inference Client for model: {MODEL}")
    # token=None is the InferenceClient default, so a single constructor call
    # replaces the previous redundant conditional (two separate calls).
    client = InferenceClient(model=MODEL, token=API_TOKEN)
except Exception as e:
    # Chain the original exception so the real cause stays in the traceback.
    raise gr.Error(f"Failed to initialize model client. Error: {e}") from e

# --- Core Code Generation Function ---
def generate_code(
    prompt: str,
    backend_choice: str,
    max_tokens: int,
    temperature: float,
    top_p: float,
):
    """Stream website code for *prompt* from the chat model.

    Yields the growing partial response as tokens arrive (for live display),
    then yields a final cleaned version with Markdown code fences, stray
    chat-role markers, and common conversational boilerplate stripped.
    On any failure, yields a Markdown-formatted error message instead.

    Args:
        prompt: User description of the desired website.
        backend_choice: Backend hint ("Static" / "Flask" / "Node.js").
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature passed to the model.
        top_p: Nucleus-sampling threshold passed to the model.
    """
    print(f"Generating code for: {prompt[:100]}... | Backend: {backend_choice}")

    # --- Dynamically Build System Message ---
    # Constrains the model to SFW, website-only, minimally-commented output.
    system_message = (
        "you are an ai that is supposed to generate websites, you must not say anything except giving code , "
        "user can select backend like static , flask , nodejs only , you should always keep the website sfw and minimal errors, "
        "you must create an index.html following the user prompt, "
        "if the user asks you create an code that's not about an website you should say "
        "'hey there! am here to create websites for you unfortunately am programmed to not create codes! otherwise I would go on the naughty list :-(', "
        "your code always must have no useless comments you should only add comments where users are required to modify the code."
    )

    user_prompt = f"USER_PROMPT = {prompt}\nUSER_BACKEND = {backend_choice}"

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": user_prompt},
    ]

    # Single accumulator; the original kept two always-identical copies
    # (response_stream / full_response).
    full_response = ""

    try:
        stream = client.chat_completion(
            messages=messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        )
        for message in stream:
            token = message.choices[0].delta.content
            # Delta content may be None on role/finish chunks; skip those.
            if isinstance(token, str):
                full_response += token
                yield full_response

        # Post-process: drop leading/trailing code fences and chat markers.
        cleaned_response = full_response.strip()
        cleaned_response = re.sub(r"^\s*```[a-z]*\s*\n?", "", cleaned_response)
        cleaned_response = re.sub(r"\n?\s*```\s*$", "", cleaned_response)
        cleaned_response = re.sub(
            r"<\s*\|?\s*(user|assistant)\s*\|?\s*>",
            "",
            cleaned_response,
            flags=re.IGNORECASE,
        )

        # Strip conversational boilerplate from either end. The original only
        # checked startswith, so trailing sign-offs such as
        # "Let me know if you need anything else." were never removed.
        common_phrases = [
            "Here is the code:", "Okay, here is the code:", "Here's the code:",
            "Sure, here is the code you requested:", "Let me know if you need anything else."
        ]
        for phrase in common_phrases:
            if cleaned_response.lower().startswith(phrase.lower()):
                cleaned_response = cleaned_response[len(phrase):].lstrip()
            if cleaned_response.lower().endswith(phrase.lower()):
                cleaned_response = cleaned_response[:-len(phrase)].rstrip()

        yield cleaned_response.strip()

    except Exception as e:
        yield f"## Error\n\nFailed to generate code.\n**Reason:** {e}"

# --- Build Gradio Interface ---
with gr.Blocks(css=".gradio-container { max-width: 90% !important; }") as demo:
    gr.Markdown("# ✨ Website Code Generator ✨")
    gr.Markdown(
        "Describe the website you want. The AI will generate a **single-file** `index.html` website.\n\n"
        "**Rules:**\n"
        "- Backend hint (Static / Flask / Node.js).\n"
        "- Always fully SFW and minimal errors.\n"
        "- Only generates websites. No other codes.\n"
        "- Minimal necessary comments only."
    )

    with gr.Row():
        # Left column: user inputs and the submit button.
        with gr.Column(scale=2):
            description_box = gr.Textbox(
                lines=6,
                label="Website Description",
                placeholder="e.g., A simple landing page with a hero section and contact form.",
            )
            backend_selector = gr.Radio(
                choices=["Static", "Flask", "Node.js"],
                value="Static",
                label="Backend Context",
                info="Hint only. Always generates only index.html.",
            )
            submit_btn = gr.Button("✨ Generate Website Code", variant="primary")

        # Right column: read-only viewer for the generated HTML.
        with gr.Column(scale=3):
            html_output = gr.Code(
                interactive=False,
                lines=30,
                language="html",
                label="Generated index.html",
            )

    # Sampling hyper-parameters, collapsed by default.
    with gr.Accordion("Advanced Settings", open=False):
        tokens_slider = gr.Slider(
            label="Max New Tokens", minimum=512, maximum=4096, step=256, value=3072
        )
        temp_slider = gr.Slider(
            label="Temperature", minimum=0.1, maximum=1.2, step=0.1, value=0.7
        )
        nucleus_slider = gr.Slider(
            label="Top-P", minimum=0.1, maximum=1.0, step=0.05, value=0.9
        )

    # Wire the button to the streaming generator; partial yields stream into
    # the code viewer as they arrive.
    submit_btn.click(
        fn=generate_code,
        inputs=[description_box, backend_selector, tokens_slider, temp_slider, nucleus_slider],
        outputs=html_output,
    )

if __name__ == "__main__":
    # Queue caps pending requests at 10 so streaming generations don't pile up.
    demo.queue(max_size=10).launch()