JPBianchi committed on
Commit d811058
1 Parent(s): 5916a52
Files changed (2)
  1. Dockerfile +1 -6
  2. app/engine/vectorstore.py +2 -2
Dockerfile CHANGED
@@ -7,12 +7,6 @@ ENV PYTHONUNBUFFERED 1
 
 ENV ENVIRONMENT=dev
 
-ENV FINRAG_WEAVIATE_API_KEY=$FINRAG_WEAVIATE_API_KEY
-ENV FINRAG_WEAVIATE_ENDPOINT=$FINRAG_WEAVIATE_ENDPOINT
-ENV $(cat /run/secrets/LLAMA_PARSE_API_KEY)
-ENV $(cat /run/secrets/OPENAI_API_KEY)
-# ^ https://huggingface.co/docs/hub/en/spaces-sdks-docker#secrets
-
 COPY ./app /app
 WORKDIR /app
 RUN mkdir /data
@@ -22,6 +16,7 @@ RUN pip install --no-cache-dir --upgrade -r requirements.txt
 
 # RUN python -c "import nltk; nltk.download('stopwords')"
 # ^ to fix runtime error, see https://github.com/run-llama/llama_index/issues/10681
+# it didn't work, I had to do chmod below (as also suggested in the article)
 
 RUN chmod -R 777 /usr/local/lib/python3.10/site-packages//llama_index/legacy/_static/nltk_cache
 
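The removed ENV lines tried to bake the Space secrets into the image at build time (ENV does not run command substitution, so the $(cat /run/secrets/...) lines could not work as written). The linked Spaces doc describes secrets configured in the Space settings as being exposed to the running container as environment variables, so, assuming that setup, the app can simply read them at runtime, which is what vectorstore.py now does. A minimal startup check under that assumption; the helper name and the exact secret list below are illustrative, not from the repo:

import os

# Hypothetical startup check: the secrets configured in the Space settings are
# assumed to be present as environment variables at runtime, so the Dockerfile
# no longer needs to re-declare them with ENV.
REQUIRED_SECRETS = [
    "FINRAG_WEAVIATE_API_KEY",
    "FINRAG_WEAVIATE_ENDPOINT",
    "LLAMA_PARSE_API_KEY",
    "OPENAI_API_KEY",
]

def check_required_secrets() -> None:
    # Collect any variables missing from the runtime environment.
    missing = [name for name in REQUIRED_SECRETS if not os.environ.get(name)]
    if missing:
        raise RuntimeError(f"Missing runtime secrets: {', '.join(missing)}")

if __name__ == "__main__":
    check_required_secrets()
    print("All expected secrets are set.")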
app/engine/vectorstore.py CHANGED
@@ -56,8 +56,8 @@ class VectorStore:
 
         self.model_path = model_path
         try:
-            self.api_key = os.environ['FINRAG_WEAVIATE_API_KEY']
-            self.url = os.environ['FINRAG_WEAVIATE_ENDPOINT']
+            self.api_key = os.environ.get('FINRAG_WEAVIATE_API_KEY')
+            self.url = os.environ.get('FINRAG_WEAVIATE_ENDPOINT')
             self.client = WeaviateWCS(endpoint=self.url,
                                       api_key=self.api_key,
                                       model_name_or_path=self.model_path)
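The only code change here is swapping dictionary-style lookups for .get(). A small sketch of the behavioural difference this relies on; plain Python, with only the variable names taken from the repo:

import os

# os.environ[...] raises KeyError when the variable is unset, so the old code
# could leave the try block before ever calling WeaviateWCS.
# os.environ.get(...) returns None instead, so the new code always reaches the
# client constructor and any failure comes from the connection attempt itself.
key = os.environ.get("FINRAG_WEAVIATE_API_KEY")   # None if unset, no exception
url = os.environ.get("FINRAG_WEAVIATE_ENDPOINT")

if key is None or url is None:
    # Hypothetical guard; in the repo the surrounding try/except around
    # WeaviateWCS is assumed to handle the missing-credentials case.
    print("Weaviate credentials not found in the environment")
else:
    print(f"Connecting to {url}")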