# Base image
FROM ubuntu:22.04
# Install necessary packages
RUN apt-get update && apt-get install -y \
curl \
wget \
nano \
vim \
neovim \
iputils-ping \
git \
sudo \
procps \
coreutils \
openssh-client \
&& apt-get clean
# Install SSHX
RUN curl -sSf https://sshx.io/get | sh
# (Optional) Add the NVIDIA package repositories and install the NVIDIA Container Toolkit
# here for GPU support; check the current instructions for Ubuntu 22.04 (jammy).
# Install application
RUN curl -fsSL https://ollama.ai/install.sh | sh
# Alternative pinned install, kept commented out, to work around an embedding bug:
# RUN curl -fsSL https://ollama.com/install.sh | sed 's#https://ollama.com/download#https://github.com/jmorganca/ollama/releases/download/v0.1.29#' | sh
# Create the directory and give appropriate permissions
RUN mkdir -p /.ollama && chmod 777 /.ollama
WORKDIR /.ollama
# Copy the entry point script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
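# For reference, a minimal sketch of what entrypoint.sh is assumed to do (the real
# script ships alongside this Dockerfile and may differ):
#
#   #!/bin/sh
#   export OLLAMA_HOST=0.0.0.0:7860           # bind the server to the exposed port
#   "$@" &                                    # run the CMD below ("ollama serve") in the background
#   sleep 5                                   # give the server a moment to come up
#   [ -n "$model" ] && ollama pull "$model"   # pre-pull the model named in $model, if set
#   wait                                      # keep the container alive on the server process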
# Set the entry point script as the default command
ENTRYPOINT ["/entrypoint.sh"]
CMD ["ollama", "serve"]
# Set the model as a build argument and environment variable (can be overridden at runtime)
ARG model
ENV model=${model}
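# Example usage (hypothetical model name):
#   docker build --build-arg model=llama3 -t ollama-server .
#   docker run -p 7860:7860 -e model=llama3 ollama-server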
# Expose the server port
EXPOSE 7860
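# Note: Ollama listens on 127.0.0.1:11434 by default, so this port is only reachable
# if OLLAMA_HOST points at 0.0.0.0:7860 (as in the entrypoint sketch above).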