from fastapi import FastAPI, Query
from typing import List

from vllm import LLM, SamplingParams

app = FastAPI()


@app.get("/llm_inference")
def read_root(
    prompts: List[str] = Query(...),
    model: str = "meta-llama/Llama-2-7b-hf",
    temperature: float = 0.0,
    max_tokens: int = 1024,
) -> List[str]:
    # Build sampling parameters from the request arguments.
    sampling_params = SamplingParams(temperature=temperature, max_tokens=max_tokens)
    # Note: the model is loaded on every request; loading it once at startup
    # would avoid repeating this expensive step.
    llm = LLM(model=model)
    outputs = llm.generate(prompts, sampling_params)
    # Return only the generated text so the response is JSON-serializable.
    return [output.outputs[0].text for output in outputs]
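A minimal client call against this endpoint might look like the sketch below. It assumes the app is served locally, e.g. with `uvicorn app:app --port 8000`; the module name, host, and port are assumptions, not part of the file above. Because `prompts` is a list-valued query parameter, each prompt is sent as a repeated `prompts` entry in the query string.

import requests

# Hypothetical local deployment; adjust host/port to match how the app is run.
resp = requests.get(
    "http://localhost:8000/llm_inference",
    params={
        "prompts": ["What is the capital of France?", "Write a haiku about GPUs."],
        "temperature": 0.0,
        "max_tokens": 256,
    },
)
print(resp.json())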