# Hugging Face Space status (extraction residue): Sleeping
import requests
import sys
'''
# Example usage:
endpoint = "https://api-inference.huggingface.co/models/gpt2"
status_info = check_public_endpoint(endpoint)
if status_info["status"]:
    print("Success:", status_info["message"])
else:
    print("Error:", status_info["message"])
    print("Status Code:", status_info["status_code"])
    print("Response Data:", status_info["response_data"])
'''
def check_public_endpoint(endpoint: str) -> dict:
    """
    Check a public inference endpoint and provide a detailed status and message.

    Args:
        endpoint (str): The URL of the endpoint to check.

    Returns:
        dict: With keys:
            status (bool): True when the endpoint is reachable (HTTP 200, or
                HTTP 503 — which the HF inference API returns while a model
                is loading, so the endpoint itself is up).
            message (str): Human-readable explanation of the result.
            status_code (int | None): HTTP status code, when a response arrived.
            response_data (str | None): Raw response body, when a response arrived.
    """
    result = {
        "status": False,             # Default status is failure
        "message": "Unknown error",  # Default message
        "status_code": None,
        "response_data": None,
    }
    try:
        # No Authorization header required for public models.
        # timeout= prevents hanging indefinitely on an unresponsive host.
        response = requests.get(endpoint, timeout=30)
        result["status_code"] = response.status_code
        result["response_data"] = response.text
        if response.status_code == 200:
            result["status"] = True
            result["message"] = "Endpoint is reachable and returned a valid response."
        elif response.status_code == 503:
            # BUGFIX: this branch was a separate `if`, so the 200-check's
            # `else` clobbered the 503 message while leaving status=True.
            result["status"] = True
            result["message"] = "Endpoint is reachable and returned a service unavailable response."
        else:
            result["message"] = f"Request failed with status code {response.status_code}. Response: {response.text}"
    except requests.exceptions.RequestException as e:
        result["message"] = f"Request failed with exception: {e}"
    return result
'''
# Check if the response status code is 200 and it returns inference data
if response.status_code == 200:
    # Public models will return inference data without needing an API key.
    # Attempt to parse the JSON response.
    response_json = response.json()
    # Print the first few keys of the response JSON for debugging
    print(f"Response JSON keys: {list(response_json.keys())[:5]}")
    if "model" in response_json or "error" in response_json:
        return True
    else:
        print("The response does not contain inference-related data.")
        return False
'''
def is_huggingface_endpoint(endpoint: str, api_key: str = "YOUR_HUGGINGFACE_API_KEY") -> bool:
    """
    Check whether *endpoint* behaves like a Hugging Face Inference Endpoint.

    Args:
        endpoint (str): URL to probe.
        api_key (str): Bearer token sent in the Authorization header.
            Defaults to the original placeholder for backward compatibility;
            pass a real Hugging Face API key for authenticated access.

    Returns:
        bool: True when the endpoint responds HTTP 200 with a JSON body
        containing a "model" key; False otherwise (including network errors
        and non-JSON responses).
    """
    try:
        headers = {"Authorization": f"Bearer {api_key}"}
        # timeout= prevents hanging indefinitely on an unresponsive host.
        response = requests.get(endpoint, headers=headers, timeout=30)
        if response.status_code == 200:
            try:
                payload = response.json()
            except ValueError:
                # Non-JSON 200 body previously raised out of this function
                # in older `requests`; treat it as "not an HF endpoint".
                payload = {}
            if "model" in payload:
                return True
        print("This is NOT a Hugging Face Inference Endpoint.")
        return False
    except requests.exceptions.RequestException as e:
        print(f"Request failed: {e}")
        return False