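# CUDA 12.1 runtime base image with cuDNN 8 on Ubuntu 20.04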
FROM nvidia/cuda:12.1.1-cudnn8-runtime-ubuntu20.04
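
# Avoid interactive prompts from apt during the image build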
ENV DEBIAN_FRONTEND=noninteractive
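
# System packages: Python 3, pip, and a build toolchain for packages compiled from source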
RUN apt-get update && apt-get install -y \
    python3 \
    python3-pip \
    cmake \
    build-essential \
    && rm -rf /var/lib/apt/lists/*
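
# Create an unprivileged user (UID 1000) to run the application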
RUN useradd -m -u 1000 user
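
# All application files live under /app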
WORKDIR /app
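
# Install Python dependencies first so this layer is cached when only the code changes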
COPY requirements.txt .
RUN pip3 install --no-cache-dir --upgrade pip && \
    pip3 install --no-cache-dir -r requirements.txt
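
# Install llama-cpp-python from the prebuilt CUDA 12.1 wheel index instead of compiling from source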
RUN pip3 install --no-cache-dir llama-cpp-python \
    --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cu121
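
# Copy the application source code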
COPY . .
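
# Hand ownership of the app directory to the unprivileged user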
RUN chown -R user:user /app
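
# Run the container as the unprivileged user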
USER user
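
# Make user-level Python entry points (pip install --user) visible on PATH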
ENV PATH="/home/user/.local/bin:$PATH"
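
# Serve the app with uvicorn on port 7860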
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"] |