File size: 1,853 Bytes
10d6a86
 
 
 
 
 
 
 
 
 
8d3cc9f
04c92e3
c57504d
 
10d6a86
09b3b7b
 
 
 
 
689bfe4
09b3b7b
 
 
12fff34
10d6a86
02e58b8
10d6a86
0c25239
 
 
 
 
 
 
 
 
 
439aec6
e850c75
d811058
e850c75
88ae985
0415073
88ae985
 
809d184
 
cd03528
04c92e3
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
FROM python:3.10-slim

# Use key=value form (legacy space-separated ENV is deprecated).
# PYTHONDONTWRITEBYTECODE: don't write .pyc files (saves space).
# PYTHONUNBUFFERED: send Python output straight to the terminal without
# buffering -> real-time monitoring of logs.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

ENV ENVIRONMENT=dev

# The app is launched as the package "app" from the filesystem root (see CMD),
# so the working directory must stay "/". The original used a relative
# "WORKDIR ." (hadolint DL3000); make the same location explicit and absolute.
WORKDIR /

# OS packages first: this layer changes rarely, so it caches well.
# reflex needs unzip + curl; --no-install-recommends keeps the image small;
# removing the apt lists in the same layer avoids baking them into the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    bash \
    curl \
    git \
    git-lfs \
    unzip \
    wget \
    && rm -rf /var/lib/apt/lists/*

# Copy only the dependency manifests before the full source tree so the pip
# layer is reused when just application code changes. Install order is kept
# identical to the original (app/requirements.txt first, then the top-level
# requirements.txt). --no-cache-dir: no pip cache baked into the image.
COPY requirements.txt .
COPY ./app/requirements.txt ./app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r app/requirements.txt \
    && pip install --no-cache-dir --upgrade -r requirements.txt

# Application source and the writable data directories it expects.
COPY ./app /app
RUN mkdir -p data assets/data

# Configure guardrails with a BuildKit secret mount so the token never lands
# in an image layer or in `docker history`.
# https://huggingface.co/docs/hub/spaces-sdks-docker#secrets
RUN --mount=type=secret,id=GUARDRAILS_TOKEN,mode=0444,required=true \
    guardrails configure --token $(cat /run/secrets/GUARDRAILS_TOKEN) --no-metrics True

# One layer for the validator installs -- they belong to a single logical step.
RUN guardrails hub install hub://guardrails/regex_match \
    && guardrails hub install hub://guardrails/toxic_language \
    && guardrails hub install hub://guardrails/detect_pii \
    && guardrails hub install hub://guardrails/qa_relevance_llm_eval

# RUN python -c "import nltk; nltk.download('stopwords')"
# ^ attempted fix for a runtime error, see
# https://github.com/run-llama/llama_index/issues/10681
# It didn't work; the chmod below (also suggested in that issue) did.
# NOTE(review): 777 is deliberately broad because the container may run as an
# arbitrary non-root user (e.g. on HF Spaces) and these paths must be writable
# at runtime. mkdir -p makes the /.cache chmod robust if the directory wasn't
# created by an earlier step.
RUN chmod -R 777 /usr/local/lib/python3.10/site-packages/llama_index/legacy/_static/nltk_cache \
    && mkdir -p /.cache \
    && chmod -R 777 /.cache

ENV TRANSFORMERS_CACHE=/usr/local/lib/python3.10/site-packages/llama_index/legacy/_static/nltk_cache
# ^ not elegant but it works. HF warns that TRANSFORMERS_CACHE will be
# deprecated in transformers v5 in favor of HF_HOME -- revisit on upgrade.

# Documentation only (does not publish the port): the app serves on 7860.
EXPOSE 7860

CMD ["uvicorn", "app.main_reflex:app", "--host", "0.0.0.0", "--port", "7860"]