import os
import gradio as gr
from google import genai
from google.genai import types
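
# A minimal Gradio front end for Google's Gemini API: the prompt (and the
# temperature chosen in the UI) is passed to generate_response() below, which
# yields the accumulated text so the reply streams into the page.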
def generate_response(user_input, temperature=1.0):
    """Stream a Gemini completion, yielding the cumulative text after each chunk."""
    try:
        client = genai.Client(
            api_key=os.environ.get("GEMINI_API_KEY"),  # requires GEMINI_API_KEY in the environment
        )
        model = "gemini-2.5-pro-exp-03-25"
        contents = [
            types.Content(
                role="user",
                parts=[
                    types.Part.from_text(text=user_input),
                ],
            ),
        ]
        generate_content_config = types.GenerateContentConfig(
            temperature=temperature,
            response_mime_type="text/plain",
        )
        full_response = ""
        for chunk in client.models.generate_content_stream(
            model=model,
            contents=contents,
            config=generate_content_config,
        ):
            if chunk.text:
                full_response += chunk.text
                yield full_response  # Yield intermediate results for streaming effect
    except Exception as e:
        # Yield (rather than return) so the error message actually reaches the output box
        yield f"An error occurred: {str(e)}"
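
# The generator can also be exercised outside the UI, e.g.:
#   for partial in generate_response("Explain quantum computing in simple terms", temperature=0.7):
#       print(partial)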
# Custom CSS for better appearance
css = """
.gradio-container {
    max-width: 800px;
    margin: auto;
}
footer {
    visibility: hidden;
}
"""
# Create the Gradio interface
with gr.Blocks(css=css, title="Gemini AI Chat") as demo:
    gr.Markdown("# Gemini AI Chat Interface")
    gr.Markdown("Enter your prompt and get AI-generated responses from Google's Gemini model.")
    with gr.Row():
        with gr.Column(scale=7):
            user_input = gr.Textbox(
                label="Your Prompt",
                placeholder="Type your message here...",
                lines=5,
                max_lines=10,
                interactive=True,
                container=False,
            )
        with gr.Column(scale=3):
            temperature = gr.Slider(
                minimum=0.1,
                maximum=2.0,
                value=1.0,
                step=0.1,
                label="Creativity (Temperature)",
                interactive=True,
            )
            submit_btn = gr.Button("Generate Response", variant="primary")
            clear_btn = gr.Button("Clear", variant="secondary")
    output = gr.Textbox(
        label="AI Response",
        placeholder="AI response will appear here...",
        lines=10,
        max_lines=20,
        interactive=False,
    )
    examples = gr.Examples(
        examples=[
            ["Explain quantum computing in simple terms"],
            ["Write a short poem about artificial intelligence"],
            ["What are the latest advancements in renewable energy?"],
        ],
        inputs=user_input,
        label="Example Prompts",
    )
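    # Clicking an example only fills the prompt box; nothing is sent to the
    # model until "Generate Response" is pressed.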
    # Event handlers: pass the slider value through so the "Creativity" control
    # actually affects the model's temperature.
    submit_btn.click(
        fn=generate_response,
        inputs=[user_input, temperature],
        outputs=output,
        api_name="generate",
    )
    clear_btn.click(
        fn=lambda: ("", ""),
        inputs=[],
        outputs=[user_input, output],
    )
# Run the app
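# queue() lets the partial responses yielded by generate_response stream to the
# browser; launch() then serves the app locally on port 7860.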
if __name__ == "__main__":
    demo.queue().launch(server_port=7860, share=False)