Spaces:
Runtime error
Runtime error
Update test.py
Browse files
test.py
CHANGED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
from langchain.llms.base import LLM
|
3 |
+
from typing import Optional, List
|
4 |
+
from pydantic import Field
|
5 |
+
import json
|
6 |
+
|
7 |
+
|
8 |
+
class HumbleBeeLLM(LLM):
    """LangChain LLM wrapper for the HumbleBee hosted model API.

    Logs in with username/password to obtain a bearer token, then streams
    completions from the ``/generate`` endpoint, echoing tokens to stdout
    as they arrive and returning the assembled text.
    """

    api_base: str = "https://humblebeeai-llm-host.hf.space"
    # exclude=True keeps the credentials out of Pydantic *serialization*
    # (model dumps); it does not skip validation. Typed Optional because the
    # declared default is None and real values are injected in __init__.
    username: Optional[str] = Field(default=None, exclude=True)
    password: Optional[str] = Field(default=None, exclude=True)
    token: Optional[str] = None  # bearer token set by authenticate()

    def __init__(self, username: str, password: str, **kwargs):
        """Create the client and authenticate immediately.

        Args:
            username: API account name.
            password: API account password.
            **kwargs: Extra fields forwarded to LangChain's ``LLM`` base.

        Raises:
            Exception: if the initial login fails (propagated from
                :meth:`authenticate`).
        """
        super().__init__(**kwargs)  # initialize the Pydantic/LangChain base first
        self.username = username
        self.password = password
        self.authenticate()

    def authenticate(self):
        """Log in and store the bearer token on ``self.token``.

        Raises:
            Exception: if the HTTP request errors out or the server
                returns a non-200 status.
        """
        login_payload = {
            "username": self.username,
            "password": self.password,
        }

        try:
            # The login endpoint expects form-urlencoded credentials, so the
            # payload goes in `data=` (not `json=`).
            response = requests.post(
                f"{self.api_base}/login",
                data=login_payload,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30,  # fail fast instead of hanging on a dead endpoint
            )

            if response.status_code == 200:
                self.token = response.json().get("access_token")
            else:
                print(f"Login failed: {response.status_code} - {response.text}")
                raise Exception("Failed to authenticate.")

        except requests.exceptions.RequestException as e:
            raise Exception(f"Error during authentication: {str(e)}") from e

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send ``prompt`` to the API and return the streamed completion.

        Args:
            prompt: The query to generate a completion for.
            stop: Stop sequences. Accepted for LangChain interface
                compatibility but not currently forwarded to the server.

        Returns:
            The full generated text, or an error-description string when
            the request fails (this method never raises on HTTP errors).
        """
        if not self.token:
            self.authenticate()

        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.token}",
        }
        payload = {"query": prompt}

        try:
            response = requests.post(
                f"{self.api_base}/generate?stream=true",
                json=payload,
                headers=headers,
                stream=True,
                timeout=60,  # bound the wait for first bytes / between chunks
            )

            if response.status_code == 200:
                # Reassemble the streamed tokens into one string.
                full_response = ""
                for line in response.iter_lines():
                    if not line:
                        continue  # skip SSE keep-alive blank lines
                    text = line.decode("utf-8")
                    # Strip the SSE "data: " prefix only at the start of the
                    # line; a blanket .replace() would also corrupt any
                    # "data: " occurring inside the token content itself.
                    if text.startswith("data: "):
                        text = text[len("data: "):]
                    try:
                        data = json.loads(text)
                    except json.JSONDecodeError:
                        continue  # ignore malformed lines
                    token_content = data.get("content", "")
                    full_response += token_content
                    print(token_content, end="", flush=True)  # live echo

                return full_response.strip()  # full text once the stream ends

            else:
                print(f"Error {response.status_code}: {response.text}")
                return f"Error {response.status_code}: {response.text}"

        except requests.exceptions.RequestException as e:
            return f"Request error: {str(e)}"

    @property
    def _identifying_params(self):
        """Parameters LangChain uses to identify/cache this LLM instance."""
        return {"api_base": self.api_base}

    @property
    def _llm_type(self):
        """LangChain type tag for this LLM implementation."""
        return "humblebee-llm"
87 |
+
|
88 |
+
|
89 |
+
# ✅ Usage Example
# NOTE(review): credentials are hard-coded and the constructor performs a
# live login, so importing this module makes a network call — move the
# credentials to environment variables and guard this behind
# `if __name__ == "__main__":` before reuse.
llm = HumbleBeeLLM(username="testuser", password="testpassword")

# The interactive loop below is deliberately disabled by wrapping it in a
# bare string literal (evaluated and discarded at import time).
"""# Invoke with LangChain's API
while True:
    query = input("\n...\n")

    if query.lower() == 'bye': # Case insensitive check
        print("Goodbye!")
        break # Exit loop when 'bye' is entered

    response = llm.invoke(query)

"""