import gradio as gr
import requests
import os

# Load API URL and token from environment variables
API_URL = os.getenv("HF_API_URL", "https://api-inference.huggingface.co/models/rahul7star/fastai-rahul-text-model-v02")
API_TOKEN = os.getenv("HF_API_TOKEN", "your-default-token")  # Replace with your actual token as a fallback
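
# Deployment note (assumption, not part of the original code): when this file runs as a
# Hugging Face Space, HF_API_URL and HF_API_TOKEN can be supplied via the Space's
# "Variables and secrets" settings, so no real token needs to be hard-coded as a fallback.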

# Function to call the Hugging Face Inference API
def call_huggingface_api(input_text):
    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    payload = {"inputs": input_text}
    try:
        print(f"Request sent to: {API_URL}")
        print(f"Payload: {payload}")
        # Make the API call
        response = requests.post(API_URL, headers=headers, json=payload)
        print(f"Response Status Code: {response.status_code}")
        if response.status_code == 200:
            data = response.json()
            print(f"Response Data: {data}")
            # The Inference API may wrap results in a list; unwrap the first item if so
            if isinstance(data, list) and data:
                data = data[0]
            # Assuming the model returns 'answer' and 'confidence' keys
            return f"Question: {input_text}\nAnswer: {data.get('answer', 'No answer found.')}\nConfidence: {data.get('confidence', 'N/A')}"
        else:
            print(f"Error Response: {response.text}")
            return f"Error: {response.status_code} - {response.text}"
    except requests.exceptions.RequestException as e:
        error_message = f"Network error during API call: {e}"
        print(error_message)
        return error_message
    except ValueError as e:
        error_message = f"Error parsing response JSON: {e}"
        print(error_message)
        return error_message
    except KeyError as e:
        error_message = f"KeyError: Missing expected key in response JSON: {e}"
        print(error_message)
        return error_message
    except Exception as e:
        error_message = f"Unexpected error during API call: {e}"
        print(error_message)
        return error_message

# Example of how specific questions about Rahul7star can be answered with predefined responses
def ask_about_rahul7star(input_text):
    # Example questions about his career, hobbies, and interests
    predefined_answers = {
        "Who is rahul7star?": "Rahul7star is a software developer and AI creator based in NSW. He is passionate about coding and AI.",
        "What does Rahul7star do?": "Rahul7star works as a developer and enjoys solving complex coding problems. He loves traveling and exploring new destinations.",
        "Tell me about Rahul7star's hobbies?": "Rahul7star enjoys driving scenic routes, having a cold beer after work, and traveling to new destinations, especially to places like Iceland to witness the Northern Lights.",
        "What is Rahul7star known for?": "He is known for his work in AI, software development, and his ability to solve complex coding challenges."
    }
    # Check if the input matches any predefined question
    if input_text in predefined_answers:
        return predefined_answers[input_text]
    else:
        # If the question isn't predefined, call the Hugging Face model API
        return call_huggingface_api(input_text)
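
# Quick sanity check (hypothetical usage sketch, commented out so it does not run in the Space):
# a predefined question is answered locally, anything else falls through to the Inference API.
# print(ask_about_rahul7star("Who is rahul7star?"))           # served from predefined_answers
# print(ask_about_rahul7star("Where does Rahul7star live?"))  # forwarded to call_huggingface_api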

# Gradio interface for the AI agent
gr.Interface(
    fn=ask_about_rahul7star,
    inputs="text",
    outputs="text",
    examples=[
        ["Who is rahul7star?"],
        ["What does Rahul7star do?"],
        ["Tell me about Rahul7star's hobbies?"],
        ["What is Rahul7star known for?"]
    ],
    title="Ask Rahul7star AI",
    description="Ask questions about Rahul7star and get personalized answers powered by the Hugging Face Inference API. Feel free to ask about his career, hobbies, or anything else."
).launch()
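
# Minimal local-run sketch (assumptions: the file is saved as app.py and you have a valid
# Hugging Face access token; the shell commands below are illustrative, not part of the app):
#   export HF_API_URL="https://api-inference.huggingface.co/models/rahul7star/fastai-rahul-text-model-v02"
#   export HF_API_TOKEN="<your Hugging Face access token>"
#   python app.py
# Gradio then serves the interface locally (by default at http://127.0.0.1:7860).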