Sergidev committed on
Commit
0577dcd
1 Parent(s): 08d6535

Update Dockerfile

Files changed (1)
  1. Dockerfile +23 -19
Dockerfile CHANGED
@@ -1,36 +1,40 @@
  # Use the NVIDIA CUDA image as the base
  FROM nvidia/cuda:12.1.1-cudnn8-runtime-ubuntu20.04

+ # Install system dependencies
+ RUN apt-get update && apt-get install -y \
+     python3 \
+     python3-pip \
+     cmake \
+     build-essential \
+     && rm -rf /var/lib/apt/lists/*
+
  # Set up a new user named "user" with user ID 1000
  RUN useradd -m -u 1000 user

- # Switch to the "user" user
- USER user
-
- # Set home to the user's home directory
- ENV HOME=/home/user \
-     PATH=/home/user/.local/bin:$PATH
-
- # Set the working directory to the user's home directory
+ # Set the working directory
  WORKDIR /app

- # Install Python and pip
- RUN apt-get update && apt-get install -y python3 python3-pip
+ # Copy the requirements file
+ COPY requirements.txt .

- # Install CMake and other build dependencies
- RUN apt-get install -y cmake build-essential
+ # Install Python dependencies
+ RUN pip3 install --no-cache-dir --upgrade pip && \
+     pip3 install --no-cache-dir -r requirements.txt

  # Install llama-cpp-python with CUDA support
  ENV FORCE_CMAKE=1
  ENV CMAKE_ARGS="-DLLAMA_CUBLAS=on"
- RUN pip install llama-cpp-python --no-cache-dir
-
- # Copy the current directory contents into the container at /app
- COPY --chown=user ./requirements.txt requirements.txt
- RUN pip install --no-cache-dir --upgrade -r requirements.txt
+ RUN pip3 install --no-cache-dir llama-cpp-python

  # Copy the rest of the application code
- COPY --chown=user . /app
+ COPY . .
+
+ # Change ownership of the app directory to the user
+ RUN chown -R user:user /app
+
+ # Switch to the "user" user
+ USER user

  # Run the application
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
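
Note: the final CMD expects a FastAPI application exposed as `app` in `app.py`, served by uvicorn on port 7860. That file is not part of this commit, so the following is only a minimal sketch of the kind of app this image assumes, using the llama-cpp-python API; the model path, endpoint name, and generation parameters are illustrative assumptions, not the repository's actual code.

# Hypothetical minimal app.py -- not part of this commit; sketch only.
# Assumes a GGUF model file is available at MODEL_PATH inside the image.
from fastapi import FastAPI
from llama_cpp import Llama
from pydantic import BaseModel

MODEL_PATH = "model.gguf"  # assumption: the real model path is not shown in the diff

app = FastAPI()

# n_gpu_layers=-1 offloads all layers to the GPU, which is why the image
# builds llama-cpp-python with CMAKE_ARGS="-DLLAMA_CUBLAS=on".
llm = Llama(model_path=MODEL_PATH, n_gpu_layers=-1)

class Prompt(BaseModel):
    text: str
    max_tokens: int = 128

@app.post("/generate")
def generate(prompt: Prompt):
    # Plain completion call; llama_cpp returns an OpenAI-style response dict.
    result = llm(prompt.text, max_tokens=prompt.max_tokens)
    return {"completion": result["choices"][0]["text"]}

With GPU access enabled at runtime (e.g. docker run --gpus all -p 7860:7860 <image>), requests to port 7860 would reach an app like this through the uvicorn command in the final CMD.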