# NOTE(review): the seven lines below are Hugging Face web-page residue (repo
# breadcrumb, commit message, file-size chrome) captured when this file was
# scraped — commented out so the Dockerfile parses; original text preserved.
# mr / Dockerfile
# JPBianchi's picture
# error in dockerfile
# 689bfe4
# raw
# history blame
# 1.79 kB
FROM python:3.10-slim

# Runtime environment knobs (key=value form; the legacy space-separated
# `ENV KEY VALUE` form is deprecated and flagged by BuildKit build checks).
ENV PYTHONDONTWRITEBYTECODE=1
# ^ saves space by not writing .pyc files
ENV PYTHONUNBUFFERED=1
# ^ ensures that the output from the Python app is sent straight to the terminal without being buffered -> real time monitoring
ENV ENVIRONMENT=dev

# Explicit absolute workdir (DL3000 forbids relative `WORKDIR .`). It must be
# `/` because later steps reference `app/requirements.txt` and uvicorn loads
# the `app.main_reflex` module relative to this directory.
WORKDIR /

# Copy the dependency manifest before the full app tree so the pip layer is
# cached until requirements.txt itself changes, not on every source edit.
COPY requirements.txt .
COPY ./app /app
# OS-level tools: update+install combined in one layer (stale-cache safety,
# DL3009) and apt lists removed in the same layer so they never persist in the
# image. `--no-install-recommends` keeps the slim image slim (DL3015);
# packages are sorted alphabetically for diffability.
RUN apt-get update && apt-get install -y --no-install-recommends \
    bash \
    curl \
    git \
    git-lfs \
    unzip \
    wget \
    && rm -rf /var/lib/apt/lists/*
# reflex needs unzip curl
# Install the app-tree requirements (/app/requirements.txt, brought in by
# `COPY ./app /app`).
RUN pip install --no-cache-dir --upgrade -r app/requirements.txt
# ^ no caching of the packages to save space
# Then the top-level requirements (/requirements.txt, from `COPY requirements.txt .`).
# NOTE(review): presumably these are two distinct dependency sets; confirm they
# are not duplicates of each other before consolidating into one install.
RUN pip install --no-cache-dir --upgrade -r requirements.txt
# pip install guardrails-ai first
# https://huggingface.co/docs/hub/spaces-sdks-docker#secrets
# Authenticate guardrails via a BuildKit secret mount: the token exists only
# for the duration of this RUN and is never written into an image layer or
# visible in `docker history` (unlike ARG/ENV).
RUN --mount=type=secret,id=GUARDRAILS_TOKEN,mode=0444,required=true \
guardrails configure --token $(cat /run/secrets/GUARDRAILS_TOKEN) --no-metrics True
# Install each Guardrails Hub validator used by the app in its own layer, so
# adding a validator later doesn't re-download the ones already installed.
RUN guardrails hub install hub://guardrails/regex_match
RUN guardrails hub install hub://guardrails/toxic_language
RUN guardrails hub install hub://guardrails/detect_pii
RUN guardrails hub install hub://guardrails/qa_relevance_llm_eval
# RUN python -c "import nltk; nltk.download('stopwords')"
# ^ to fix runtime error, see https://github.com/run-llama/llama_index/issues/10681
# it didn't work, I had to do chmod below (as also suggested in the article)
# NOTE(review): chmod -R 777 is a blunt workaround — presumably so the non-root
# user the Spaces runtime executes as can write into llama_index's bundled
# nltk cache. If revisited, prefer a targeted chown/chmod or a dedicated
# writable cache directory instead of world-writable site-packages.
RUN chmod -R 777 /usr/local/lib/python3.10/site-packages/llama_index/legacy/_static/nltk_cache
ENV TRANSFORMERS_CACHE=/usr/local/lib/python3.10/site-packages/llama_index/legacy/_static/nltk_cache
# ^ not elegant but it works
# HF warning says that TRANSFORMERS_CACHE will be deprecated in transformers v5, and advise to use HF_HOME
# NOTE(review): HF_HOME is not a drop-in replacement — it names a parent
# directory (models land under $HF_HOME/hub), so switching the variable name
# alone would change where the cache ends up; verify before migrating.
# Exec-form CMD: uvicorn runs as PID 1 and receives SIGTERM directly on stop;
# port 7860 is the Hugging Face Spaces convention.
CMD ["uvicorn", "app.main_reflex:app", "--host", "0.0.0.0", "--port", "7860"]