Lhumpal committed on
Commit
01f73f3
·
verified ·
1 Parent(s): 4ea9a07

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -14
app.py CHANGED
@@ -1,24 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  from fastapi import FastAPI, HTTPException
2
  from pydantic import BaseModel
3
  from huggingface_hub import InferenceClient
4
  import os
5
- from datasets import load_dataset
6
- from huggingface_hub import login
7
 
8
  app = FastAPI()
9
 
10
  # Get the token from the environment variable
11
  hf_token = os.environ.get("HF_TOKEN")
12
- # login(token=hf_token)
13
 
14
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)
15
- # dataset = load_dataset("Lhumpal/youtube-hunting-beast-transcripts", data_files={"concise": "concise/*", "raw": "raw/*"})
16
- # if dataset:
17
- # texts = []
18
- # for file in dataset["concise"]:
19
- # # Remove newline characters from the 'text' field
20
- # cleaned_text = file['text'].replace('\n', ' ')
21
- # texts.append(cleaned_text)
22
 
23
  class ChatRequest(BaseModel):
24
  message: str
@@ -30,7 +48,7 @@ class ChatRequest(BaseModel):
30
 
31
  class ChatResponse(BaseModel):
32
  response: str
33
-
34
  @app.post("/chat", response_model=ChatResponse)
35
  async def chat(request: ChatRequest):
36
  try:
@@ -53,7 +71,6 @@ async def chat(request: ChatRequest):
53
  token = message.choices[0].delta.content
54
  response += token
55
 
56
- return {"assistant_response": response}
57
-
58
  except Exception as e:
59
  raise HTTPException(status_code=500, detail=str(e))
 
1
+ # from fastapi import FastAPI
2
+ # from fastapi.responses import JSONResponse
3
+ # from fastapi import Request
4
+ # from huggingface_hub import InferenceClient
5
+
6
+ # app = FastAPI()
7
+
8
+ # @app.post("/")
9
+ # async def greet_json(request: Request):
10
+ # input_data = await request.json()
11
+ # # number = input_data.get("number")
12
+
13
+ # # tripled_number = number * 2
14
+ # # return {"message": f"Your input number is: {number}, your doubled number is: {tripled_number}"}
15
+ # user_input = input_data.get("user_input")
16
+
17
+ # client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
18
+ # # Get the response from the model
19
+ # response = client(user_input)
20
+
21
+ # # assistant_response = client.text_generation(user_input)
22
+ # assistant_response = "I am assistant."
23
+ # return {"assistant_message": f"Your input message is: {user_input}, assistant_response is: {response}"}
24
  from fastapi import FastAPI, HTTPException
25
  from pydantic import BaseModel
26
  from huggingface_hub import InferenceClient
27
  import os
 
 
28
 
29
  app = FastAPI()
30
 
31
  # Get the token from the environment variable
32
  hf_token = os.environ.get("HF_TOKEN")
 
33
 
34
+ if hf_token:
35
+ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)
36
+ else:
37
+ raise ValueError("HF_TOKEN environment variable not set. Please add it as a secret in your Hugging Face Space.")
38
+
39
+ # Rest of your code...
 
 
40
 
41
  class ChatRequest(BaseModel):
42
  message: str
 
48
 
49
  class ChatResponse(BaseModel):
50
  response: str
51
+
52
  @app.post("/chat", response_model=ChatResponse)
53
  async def chat(request: ChatRequest):
54
  try:
 
71
  token = message.choices[0].delta.content
72
  response += token
73
 
74
+ return {"response": response}
 
75
  except Exception as e:
76
  raise HTTPException(status_code=500, detail=str(e))