# llm_host / Dockerfile
# Author: Bahodir Nematjonov
# (page metadata from the hosting site — commit bdf1c9b "debuging", 673 bytes;
#  kept as comments so this file remains a valid Dockerfile)
# syntax=docker/dockerfile:1

# Use the official slim Python 3.9 image (Debian-based, much smaller than the full image)
FROM python:3.9-slim

# Fail pipelines when any upstream command fails (protects the `curl | sh` below, DL4006)
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# All application code lives under /code
WORKDIR /code

# Copy only the dependency manifest first so the pip layer stays cached
# until requirements.txt itself changes
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Install Ollama. The slim image ships no curl, so install curl (plus CA certs
# for the HTTPS download) in the same layer and clean the apt lists there too.
# NOTE(review): the install script is unpinned; consider pinning an Ollama
# release and verifying its checksum for reproducible builds.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
         ca-certificates \
         curl \
    && rm -rf /var/lib/apt/lists/* \
    && curl -fsSL https://ollama.com/install.sh | sh

# Writable storage for Ollama. The permissive mode is deliberate: Hugging
# Face-style runtimes start the container as an arbitrary non-root uid.
RUN mkdir -p /home/user/.ollama && chmod -R 777 /home/user/.ollama

# HOME points Ollama's key/config files at the writable directory above.
# OLLAMA_MODELS is the variable the Ollama server actually reads for model
# storage — the original `ollama serve --dir` flag does not exist and made
# the container crash at startup. OLLAMA_DIR is kept for backward compatibility.
ENV HOME=/home/user \
    OLLAMA_DIR=/home/user/.ollama \
    OLLAMA_MODELS=/home/user/.ollama/models

# Documentation only (does not publish the port): the FastAPI app listens here
EXPOSE 7860

# Start the Ollama daemon in the background, then exec uvicorn so the API
# server becomes PID 1 and receives SIGTERM from `docker stop`.
CMD ["/bin/bash", "-c", "ollama serve & exec uvicorn main:app --host 0.0.0.0 --port 7860"]