wjm55 committed
Commit 032494e · 1 Parent(s): df9c057

Refactor tokenize endpoint to use Pydantic model for request validation and update README.md examples to reflect changes in API usage

Files changed (3)
  1. README.md +9 -6
  2. app.py +6 -2
  3. requirements.txt +2 -1
README.md CHANGED
@@ -15,9 +15,10 @@ A simple API for tokenizing text using spaCy.
 
 ### Using curl
 ```bash
-curl -X POST "https://theirstory-tokenizer.hf.space:7860/tokenize" \
+curl -X POST "https://theirstory-tokenizer.hf.space/tokenize" \
   -H "Content-Type: application/json" \
-  -d '{"text": "Hello world!"}'
+  -H "accept: application/json" \
+  -d '{"text": "Hello world"}'
 ```
 
 ### Using Python
@@ -25,8 +26,9 @@ curl -X POST "https://theirstory-tokenizer.hf.space:7860/tokenize" \
 import requests
 
 response = requests.post(
-    "https://theirstory-tokenizer.hf.space:7860/tokenize",
-    json={"text": "Hello world!"}
+    "https://theirstory-tokenizer.hf.space/tokenize",
+    json={"text": "Hello world"},
+    headers={"accept": "application/json"}
 )
 tokens = response.json()
 print(tokens)
@@ -34,13 +36,14 @@ print(tokens)
 
 ### Using JavaScript
 ```javascript
-const response = await fetch('https://theirstory-tokenizer.hf.space:7860/tokenize', {
+const response = await fetch('https://theirstory-tokenizer.hf.space/tokenize', {
   method: 'POST',
   headers: {
     'Content-Type': 'application/json',
+    'accept': 'application/json'
   },
   body: JSON.stringify({
-    text: 'Hello world!'
+    text: 'Hello world'
   })
 });
 const tokens = await response.json();
app.py CHANGED
@@ -1,12 +1,16 @@
 from fastapi import FastAPI
+from pydantic import BaseModel
 import spacy
 
 app = FastAPI()
 nlp = spacy.blank("en")
 
+class TokenizeRequest(BaseModel):
+    text: str
+
 @app.post("/tokenize")
-async def tokenize_text(text: str):
-    doc = nlp(text)
+async def tokenize_text(request: TokenizeRequest):
+    doc = nlp(request.text)
     tokens = []
     for token in doc:
         tokens.append({
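
For context, a minimal sketch of what app.py might look like in full after this commit. The hunk above cuts off inside `tokens.append({`, so the per-token fields shown here (`text`, `start`, `end`) are assumptions, not the repository's actual keys.

```python
# Hypothetical reconstruction of app.py after this commit.
# The fields inside tokens.append({...}) are truncated in the diff,
# so the keys below are assumed for illustration only.
from fastapi import FastAPI
from pydantic import BaseModel
import spacy

app = FastAPI()
nlp = spacy.blank("en")  # blank English pipeline: tokenizer only


class TokenizeRequest(BaseModel):
    text: str


@app.post("/tokenize")
async def tokenize_text(request: TokenizeRequest):
    doc = nlp(request.text)
    tokens = []
    for token in doc:
        tokens.append({
            "text": token.text,             # assumed field
            "start": token.idx,             # assumed field: character offset
            "end": token.idx + len(token),  # assumed field
        })
    return tokens
```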
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 fastapi
 uvicorn[standard]
 supervision
-spacy
+spacy
+pydantic
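
With the `TokenizeRequest` model in place, the endpoint expects a JSON body containing a `text` field, and FastAPI rejects malformed bodies with a 422 validation error. A quick sketch of checking this behavior, assuming the Space URL from the README is reachable:

```python
import requests

BASE_URL = "https://theirstory-tokenizer.hf.space"  # URL from the README

# Well-formed request: JSON body with the required "text" field.
ok = requests.post(
    f"{BASE_URL}/tokenize",
    json={"text": "Hello world"},
    headers={"accept": "application/json"},
)
print(ok.status_code, ok.json())

# Body missing "text": the Pydantic model fails validation and
# FastAPI responds with a 422 Unprocessable Entity error.
bad = requests.post(f"{BASE_URL}/tokenize", json={})
print(bad.status_code)  # expected: 422
```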