check_public_endpoint
checks/endpoint_check.py  CHANGED  +47 -16
@@ -1,17 +1,53 @@
 import requests
 import sys
 
-
+'''
+# Example usage:
+endpoint = "https://api-inference.huggingface.co/models/gpt2"
+status_info = check_public_endpoint(endpoint)
 
-
-
-
+if status_info["status"]:
+    print("Success:", status_info["message"])
+else:
+    print("Error:", status_info["message"])
+    print("Status Code:", status_info["status_code"])
+    print("Response Data:", status_info["response_data"])
+'''
+
+def check_public_endpoint(endpoint: str):
+    """
+    Checks the given endpoint and provides a detailed status and message.
+
+    Args:
+        endpoint (str): The URL of the endpoint to check.
+
+    Returns:
+        dict: Contains status (True/False) and a message explaining the result.
+    """
+    result = {
+        "status": False,  # Default status is failure
+        "message": "Unknown error",  # Default message
+        "status_code": None,
+        "response_data": None
+    }
+
+    try:  # No Authorization header required for public models
         response = requests.get(endpoint)
-
-
-
-
-
+        result["status_code"] = response.status_code
+        result["response_data"] = response.text
+
+        if response.status_code == 200:
+            result["status"] = True
+            result["message"] = "Endpoint is reachable and returned a valid response."
+        else:
+            result["message"] = f"Request failed with status code {response.status_code}. Response: {response.text}"
+
+    except requests.exceptions.RequestException as e:
+        result["message"] = f"Request failed with exception: {e}"
+
+    return result
+
+'''
         # Check if the response status code is 200 and it returns inference data
         if response.status_code == 200:
             # Public models will return inference data without needing an API key
@@ -26,13 +62,8 @@ def is_public_endpoint(endpoint: str):
         else:
             print("The response does not contain inference-related data.")
             return False
-
-
-        return False
-    except requests.exceptions.RequestException as e:
-        print(f"Request failed: {e}")
-        return False
-
+
+'''
 
 
 def is_huggingface_endpoint(endpoint: str):
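For reference, a minimal usage sketch of the new helper as an importable function rather than the commented-out example block. This assumes the repository root is on the import path so that checks.endpoint_check resolves as a module, and that the requests dependency is installed; the endpoint URL is the one used in the commit's own example.

# Hypothetical usage sketch; assumes `checks` is importable from the repo root
# and that `requests` is installed.
from checks.endpoint_check import check_public_endpoint

# Endpoint taken from the commit's example block.
endpoint = "https://api-inference.huggingface.co/models/gpt2"

status_info = check_public_endpoint(endpoint)

if status_info["status"]:
    print("Success:", status_info["message"])
else:
    print("Error:", status_info["message"])
    print("Status Code:", status_info["status_code"])
    print("Response Data:", status_info["response_data"])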