internal-ollama / OpenAIClasses.py
from typing import List, Optional

from pydantic import BaseModel


class ChatMessage(BaseModel):
    role: str
    content: str


class ChatCompletionRequest(BaseModel):
    model: str = "qwen2.5:14b"
    messages: List[ChatMessage]
    max_tokens: Optional[int] = 512
    temperature: Optional[float] = 0.2
    stream: Optional[bool] = False
    response_format: Optional[dict] = None
    def model_dump(self, *args, **kwargs):
        # Override pydantic's model_dump so the OpenAI-style "response_format"
        # field is serialized as "format", the key Ollama expects.
        data = super().model_dump(*args, **kwargs)
        if "response_format" in data:
            data["format"] = data.pop("response_format")
        return data
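

# --- Usage sketch (not part of the original file) ---------------------------
# A minimal, hypothetical example assuming pydantic v2 is installed: build an
# OpenAI-style request and serialize it. The overridden model_dump renames
# "response_format" to "format" in the resulting payload; the value shown here
# simply mirrors OpenAI's JSON-mode setting and is illustrative only.
if __name__ == "__main__":
    request = ChatCompletionRequest(
        messages=[ChatMessage(role="user", content="Reply with a JSON greeting.")],
        response_format={"type": "json_object"},
    )
    payload = request.model_dump()
    # The key rename is what this sketch demonstrates.
    assert "format" in payload and "response_format" not in payload
    print(payload)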