llm_host / Dockerfile
# Use the official Python 3.9 image
FROM python:3.9
# Set the working directory
WORKDIR /code
# Copy requirements.txt and install dependencies
COPY ./requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
# Install Ollama
RUN curl -fsSL https://ollama.com/install.sh | sh
# Create a writable Ollama directory for the non-root user
RUN mkdir -p /home/user/.ollama && chmod -R 777 /home/user/.ollama
# Point OLLAMA_HOME at the writable directory to avoid permission issues
ENV OLLAMA_HOME=/home/user/.ollama
# Expose FastAPI's port
EXPOSE 7860
# Start the Ollama server in the background, then launch the FastAPI app with Uvicorn
CMD ollama serve & uvicorn main:app --host 0.0.0.0 --port 7860
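# A minimal usage sketch for building and running this image locally; the
# image tag "llm-host" below is an illustrative name, not defined by this repo:
#   docker build -t llm-host .
#   docker run -p 7860:7860 llm-host
# The FastAPI app (main:app) is then reachable at http://localhost:7860, while
# the background `ollama serve` listens inside the container on its default
# port 11434. Note that `ollama serve` may take a moment to become ready, so
# the application code should tolerate a brief startup delay when connecting.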