import requests
import sys
'''
# Example usage:
endpoint = "https://api-inference.huggingface.co/models/gpt2"
status_info = check_public_endpoint(endpoint)
if status_info["status"]:
    print("Success:", status_info["message"])
else:
    print("Error:", status_info["message"])
print("Status Code:", status_info["status_code"])
print("Response Data:", status_info["response_data"])
'''
def check_public_endpoint(endpoint: str):
    """
    Checks the given endpoint and provides a detailed status and message.

    Args:
        endpoint (str): The URL of the endpoint to check.

    Returns:
        dict: Contains status (True/False) and a message explaining the result.
    """
    result = {
        "status": False,             # Default status is failure
        "message": "Unknown error",  # Default message
        "status_code": None,
        "response_data": None,
    }
    try:
        # No Authorization header required for public models
        response = requests.get(endpoint, timeout=10)
        result["status_code"] = response.status_code
        result["response_data"] = response.text
        if response.status_code == 200:
            result["status"] = True
            result["message"] = "Endpoint is reachable and returned a valid response."
        elif response.status_code == 503:
            # 503 means the model is still loading; the endpoint itself is reachable
            result["status"] = True
            result["message"] = "Endpoint is reachable and returned a service unavailable response."
        else:
            result["message"] = f"Request failed with status code {response.status_code}. Response: {response.text}"
    except requests.exceptions.RequestException as e:
        result["message"] = f"Request failed with exception: {e}"
    return result
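# A minimal sketch of one way to build on the 503 handling above: poll the
# endpoint until the model has finished loading. The wait_seconds and
# max_attempts values are illustrative assumptions, not part of the original
# script.
import time

def wait_until_ready(endpoint: str, max_attempts: int = 5, wait_seconds: float = 2.0):
    """Polls a public endpoint until it returns 200, or gives up."""
    for _ in range(max_attempts):
        info = check_public_endpoint(endpoint)
        if info["status_code"] == 200:
            return True
        # A 503 means the model is still loading, so wait and retry
        time.sleep(wait_seconds)
    return False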
'''
# Legacy check, kept for reference:
# Check if the response status code is 200 and it returns inference data
if response.status_code == 200:
    # Public models will return inference data without needing an API key
    # Attempt to parse the JSON response
    response_json = response.json()
    # Print the first few keys of the response JSON for debugging
    print(f"Response JSON keys: {list(response_json.keys())[:5]}")
    if "model" in response_json or "error" in response_json:
        return True
    else:
        print("The response does not contain inference-related data.")
        return False
'''
def is_huggingface_endpoint(endpoint: str):
    """Checks whether the endpoint responds like a Hugging Face Inference Endpoint."""
    try:
        # Replace YOUR_HUGGINGFACE_API_KEY with a real token; public models
        # may still answer without one
        headers = {"Authorization": "Bearer YOUR_HUGGINGFACE_API_KEY"}
        response = requests.get(endpoint, headers=headers, timeout=10)
        if response.status_code == 200 and "model" in response.json():
            return True
        else:
            print("This is NOT a Hugging Face Inference Endpoint.")
            return False
    except requests.exceptions.RequestException as e:
        print(f"Request failed: {e}")
        return False
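# A minimal sketch of how the two helpers might be combined when the file is
# run directly. The gpt2 model URL is just an example endpoint, and the block
# assumes network access is available.
if __name__ == "__main__":
    test_endpoint = "https://api-inference.huggingface.co/models/gpt2"
    info = check_public_endpoint(test_endpoint)
    if info["status"]:
        print("Success:", info["message"])
    else:
        print("Error:", info["message"])
    print("Status Code:", info["status_code"])
    is_huggingface_endpoint(test_endpoint)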