Update app.py
app.py
CHANGED
@@ -40,12 +40,13 @@ from transformers import AutoTokenizer
 
 app = FastAPI()
 
-
+#@app.get("/")
 # Load dataset and tokenizer
 billsum = load_dataset("billsum", split="ca_test")  # Load a small sample
 tokenizer = AutoTokenizer.from_pretrained("t5-small")
 prefix = "summarize: "  # Example prefix for text generation
 
+@app.get("/")
 def preprocess_function(examples):
     inputs = [prefix + doc for doc in examples["text"]]
     model_inputs = tokenizer(inputs, max_length=1024, truncation=True)
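
Note: decorating preprocess_function directly with @app.get("/") makes FastAPI treat its examples argument as a required query parameter, so requests to / will fail validation. A minimal sketch of how the same preprocessing could sit behind a separate root endpoint is shown below; the preview() handler, its response shape, and the use of the first dataset record are illustrative assumptions, not part of this commit.

from datasets import load_dataset
from fastapi import FastAPI
from transformers import AutoTokenizer

app = FastAPI()

# Load dataset and tokenizer once at startup (same objects as in the diff above)
billsum = load_dataset("billsum", split="ca_test")  # Load a small sample
tokenizer = AutoTokenizer.from_pretrained("t5-small")
prefix = "summarize: "  # Example prefix for text generation

def preprocess_function(examples):
    # Prepend the task prefix to each bill text and tokenize
    inputs = [prefix + doc for doc in examples["text"]]
    return tokenizer(inputs, max_length=1024, truncation=True)

@app.get("/")
def preview():
    # Hypothetical endpoint: tokenize the first record and report its token count
    encoded = preprocess_function(billsum[:1])
    return {"num_tokens": len(encoded["input_ids"][0])}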