# Hugging Face Spaces app — Q&A demo backed by the Inference API.
# Standard library first, then third-party, per PEP 8.
import os

import gradio as gr
import requests

# Load API URL and token from environment variables.
# HF_API_URL should point at a Hugging Face Inference API model endpoint.
API_URL = os.getenv("HF_API_URL", "https://api-inference.huggingface.co/models/your-model")
# NOTE(review): do not commit a real token as the fallback default — rely on the
# HF_API_TOKEN environment variable in deployment.
API_TOKEN = os.getenv("HF_API_TOKEN", "your-default-token")  # Replace with your actual token for fallback
# Function to call the Hugging Face Inference API
def call_huggingface_api(input_text):
    """Send *input_text* to the Hugging Face Inference API and format the reply.

    Parameters
    ----------
    input_text : str
        The user's question, forwarded as the ``inputs`` field of the payload.

    Returns
    -------
    str
        A human-readable block with the question, answer and confidence on
        success, or an ``Error: ...`` description when the request fails.
    """
    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    payload = {"inputs": input_text}
    try:
        # Bounded timeout so a stalled endpoint cannot hang the UI forever.
        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
        print(f"Request sent to: {API_URL}")
        print(f"Payload: {payload}")
        print(f"Response Status Code: {response.status_code}")
        if response.status_code == 200:
            data = response.json()
            # The Inference API often wraps results in a list; unwrap the
            # first element so the .get() lookups below don't crash.
            if isinstance(data, list) and data:
                data = data[0]
            if not isinstance(data, dict):
                # Unexpected shape — show the raw payload rather than erroring.
                return f"Question: {input_text}\nAnswer: {data}"
            # NOTE(review): many HF QA models report the score under 'score',
            # not 'confidence' — verify against the actual model's response.
            return (
                f"Question: {input_text}\n"
                f"Answer: {data.get('answer', 'No answer found.')}\n"
                f"Confidence: {data.get('confidence', 'N/A')}"
            )
        print(f"Error Response: {response.text}")
        return f"Error: {response.status_code} - {response.text}"
    except Exception as e:
        # Broad on purpose: any failure (network, JSON decode, ...) is
        # reported to the user as text instead of crashing the Gradio app.
        return f"Error during API call: {e}"
# Gradio Interface: simple text-in / text-out front end over the API helper.
demo = gr.Interface(
    fn=call_huggingface_api,
    inputs="text",
    outputs="text",
    examples=[
        ["Who is rahul7star?"],
        ["What does Rahul7star do?"],
        ["Tell me about Rahul7star"],
    ],
    title="Ask Rahul7star AI",
    description="Ask questions about rahul7star and get answers powered by Hugging Face Inference API.",
)

# Launch only when run as a script (Spaces executes app.py as __main__), so
# importing this module — e.g. from tests — does not start a web server.
if __name__ == "__main__":
    demo.launch()