LLM-4x7B-GGUF-Chat-V1 / Dockerfile
# Use an alias for the base image for easier updates
FROM python:3.10 AS base
# Model settings: GGUF source repo, quantization variant, and chat template
ENV MODEL=TheBloke/Beyonder-4x7B-v2-GGUF
ENV QUANT=Q3_K_M
ENV CHAT_TEMPLATE=chatml
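# MODEL and QUANT drive the download step below; CHAT_TEMPLATE is presumably
# read by app.py at runtime to select the prompt format.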
# Set the working directory
WORKDIR /app
# Install Python requirements
COPY ./requirements.txt /app/
RUN pip install --no-cache-dir --upgrade -r requirements.txt
# Download the model weights; the file stem is derived from the repo name
# (e.g. TheBloke/Beyonder-4x7B-v2-GGUF -> beyonder-4x7b-v2) and combined with QUANT
RUN MODEL_NAME_FILE=$(echo ${MODEL#*/} | tr '[:upper:]' '[:lower:]' | sed 's/-gguf$//') && \
    wget "https://huggingface.co/${MODEL}/resolve/main/${MODEL_NAME_FILE}.${QUANT}.gguf" -O model.gguf
# Copy the rest of your application
COPY . .
# Command to run the application
CMD ["python", "app.py"]