som11 committed on
Commit
5fd99d8
·
verified ·
1 Parent(s): ebeee50

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +16 -0
  2. app.py +69 -0
  3. requirements.txt +5 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
# you will also find guides on how best to write your Dockerfile

FROM python:3.9

# Run as a non-root user (uid 1000), as recommended for HF Spaces Docker apps.
RUN useradd -m -u 1000 user
USER user
# Make user-local pip installs resolvable on PATH.
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

# Copy requirements first so the dependency-install layer is cached
# independently of application-source changes.
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

COPY --chown=user . /app
# 7860 is the port Hugging Face Spaces expects the app to listen on.
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
from dotenv import load_dotenv
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel


# Load environment variables (GROQ_API_KEY) from a local .env file, if present.
load_dotenv()


# None if the variable is unset; ChatGroq would then fail at request time.
groq_api_key = os.getenv('GROQ_API_KEY')


# Shared Groq-hosted Llama-3 8B chat model used by the /predict endpoint.
llm_model = ChatGroq(
    groq_api_key=groq_api_key,
    model_name="Llama3-8b-8192"
)


app = FastAPI()


# NOTE(review): wildcard origins combined with allow_credentials=True is
# rejected by browsers under the CORS spec — confirm whether credentialed
# requests are actually required, or restrict origins to the frontend host.
origins = ["*"]


app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class textFromFrontendModel(BaseModel):
    """Request body for /predict: the raw prompt text sent by the Next.js frontend."""

    # NOTE(review): class name should be PascalCase per PEP 8, but renaming
    # would change the /predict signature's annotation, so it is kept as-is.
    textFromNextJSFrontend: str
@app.get('/')
def welcome():
    """Health-check endpoint confirming the fitbites API server is up."""
    return {
        'success': True,
        # Fixed: the original message had an unbalanced double quote and a
        # trailing space ('server of "fitbites is up and running successfully ').
        'message': 'server of "fitbites" is up and running successfully'
    }
48
+
49
+
@app.post('/predict')
async def predict(incomingTextFromFrontend: textFromFrontendModel):
    """Forward the frontend's prompt text to the Groq Llama-3 model.

    Returns a dict with 'success' and the model response.
    NOTE(review): response_from_model is a langchain chat-message object,
    not a plain string — confirm FastAPI serializes it the way the frontend
    expects; response_from_model.content may be the intended payload.
    """

    prompt_text = incomingTextFromFrontend.textFromNextJSFrontend

    # Pass-through template: the user's text becomes the entire prompt.
    prompt_template = ChatPromptTemplate.from_template(
        """
        {text}
        """
    )

    # LCEL pipe: render the prompt, then invoke the chat model.
    chain = prompt_template | llm_model

    response_from_model = chain.invoke({"text": prompt_text})

    return {
        'success': True,
        'response_from_model': response_from_model
    }
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
fastapi==0.109.2
langchain_core==0.2.18
langchain_groq==0.1.6
pydantic==2.7.2
python-dotenv==1.0.1
# Added: the Dockerfile CMD starts the app with uvicorn, but it was not
# pinned here, so the container would fail at startup with it missing.
uvicorn==0.27.1