som11 committed
Commit 893a4c6 · verified · 1 Parent(s): 389ff31

Upload 3 files

Files changed (3)
  1. Dockerfile +14 -0
  2. app.py +58 -0
  3. requirements.txt +5 -0
Dockerfile ADDED
@@ -0,0 +1,14 @@
+ # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+ # you will also find guides on how best to write your Dockerfile
+
+ FROM python:3.9.7
+
+ WORKDIR /code
+
+ COPY ./requirements.txt /code/requirements.txt
+
+ RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ COPY . .
+
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,58 @@
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
+ from dotenv import load_dotenv
+ from fastapi import FastAPI
+ from fastapi.middleware.cors import CORSMiddleware
+ from pydantic import BaseModel
+ import os
+
+
+ load_dotenv()
+
+
+ os.environ["HF_TOKEN"] = os.getenv('HF_TOKEN')
+
+
+ app = FastAPI()
+
+
+ origins = ["*"]
+
+
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=origins,
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
+
+ class LanguageTextModel(BaseModel):
+     languageText: str
+     sourceLanguageCode: str
+     targetLanguageCode: str
+
+
+ @app.get('/')
+ def welcome():
+     return {
+         'success': True,
+         'message': 'server of "nllb language translator" is up and running successfully'
+     }
+
+
+ @app.post('/translate')
+ async def translate_text(input: LanguageTextModel):
+
+     model = AutoModelForSeq2SeqLM.from_pretrained("facebook/nllb-200-distilled-600M")
+
+     tokenizer = AutoTokenizer.from_pretrained("facebook/nllb-200-distilled-600M")
+
+     translator = pipeline('translation', model=model, tokenizer=tokenizer, src_lang=input.sourceLanguageCode, tgt_lang=input.targetLanguageCode, max_length=400)
+
+     response = translator(input.languageText)
+
+     return {
+         "success": True,
+         "translated_text": response[0]['translation_text']
+     }
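With the server running, /translate expects a JSON body matching LanguageTextModel. A minimal client sketch, assuming the app is reachable at localhost:7860, the requests package is available, and eng_Latn / fra_Latn as example NLLB-200 language codes:

# translate_client.py - hypothetical example client for the /translate endpoint
import requests

payload = {
    "languageText": "I am not feeling well",   # text to translate
    "sourceLanguageCode": "eng_Latn",          # example NLLB-200 code: English (Latin script)
    "targetLanguageCode": "fra_Latn",          # example NLLB-200 code: French
}

resp = requests.post("http://localhost:7860/translate", json=payload)
print(resp.json())  # {"success": true, "translated_text": "..."}

Note that translate_text reloads the model and tokenizer on every request; creating them once at module level would cut per-request latency, but the client above works against the handler as written.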
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ fastapi
+ pydantic
+ python-dotenv
+ transformers[torch]
+ uvicorn