geqintan committed on
Commit 1868dc4 · 1 Parent(s): 6c92513
Files changed (1)
  1. app.py +8 -8
app.py CHANGED
@@ -28,8 +28,8 @@ except Exception as e:
     logger.error(f"Failed to load model: {e}")
     raise HTTPException(status_code=500, detail="Model loading failed")
 
-# class EmbeddingRequest(BaseModel):
-#     input: str = Field(..., min_length=1, max_length=1000)
+class EmbeddingRequest(BaseModel):
+    input: str | list[str]
 
 @app.post("/v1/embeddings")
 async def embeddings(request, authorization: str = Depends(check_authorization)):
@@ -37,10 +37,10 @@ async def embeddings(request, authorization: str = Depends(check_authorization))
     # logger.info("Received request for embeddings")
     # return '2222222222'
     # return request.input
-    input_texts = request.input
+    input = request.input
 
     try:
-        if not input_texts:
+        if not input:
             return {
                 "object": "list",
                 "data": [],
@@ -52,8 +52,8 @@ async def embeddings(request, authorization: str = Depends(check_authorization))
             }
 
         # Calculate embeddings
-        # embeddings = model.encode(input_text)
-        embeddings_1 = model.encode(input_texts, normalize_embeddings=True)
+        # embeddings = model.encode(input)
+        embeddings = model.encode(input, normalize_embeddings=True)
 
         # Format the embeddings in OpenAI compatible format
         data = {
@@ -67,8 +67,8 @@ async def embeddings(request, authorization: str = Depends(check_authorization))
             ],
             "model": "BAAI/bge-large-zh-v1.5",
             "usage": {
-                "prompt_tokens": len(input_texts),
-                "total_tokens": len(input_texts)
+                "prompt_tokens": len(input),
+                "total_tokens": len(input)
             }
         }
 
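
For context only, a minimal standalone sketch of the embedding step this endpoint wraps. It is not part of the commit: the model name and the normalize_embeddings=True flag come from the diff above, while the sample texts and the norm check are illustrative assumptions, and it presumes the sentence-transformers package is installed.

# Standalone sketch (not from this commit): exercises the same encode call
# the endpoint uses. Sample texts and the norm check are illustrative only.
from sentence_transformers import SentenceTransformer
import numpy as np

model = SentenceTransformer("BAAI/bge-large-zh-v1.5")

# EmbeddingRequest.input accepts a single string or a list of strings;
# SentenceTransformer.encode handles both.
texts = ["今天天气很好", "The weather is nice today"]
embeddings = model.encode(texts, normalize_embeddings=True)

print(embeddings.shape)                    # (2, embedding_dim)
print(np.linalg.norm(embeddings, axis=1))  # ~1.0 per row: vectors are L2-normalized

Because normalize_embeddings=True returns unit-length vectors, a plain dot product between two rows equals their cosine similarity, which is how these embeddings are typically compared.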