Ts1i committed
Commit 4c35f7c
1 Parent(s): 5f7d2d8

Trying pip-compile

Files changed (3)
  1. Dockerfile +2 -2
  2. requirements.in +16 -0
  3. requirements.txt +410 -12
Dockerfile CHANGED
@@ -1,4 +1,4 @@
-FROM python:3.9
+FROM python:3.10
 RUN useradd -m -u 1000 user
 USER user
 ENV HOME=/home/user \
@@ -6,6 +6,6 @@ ENV HOME=/home/user \
 WORKDIR $HOME/app
 COPY --chown=user . $HOME/app
 COPY ./requirements.txt ~/app/requirements.txt
-RUN pip-compile install -r requirements.txt
+RUN pip install -r requirements.txt
 COPY . .
 CMD ["chainlit", "run", "app.py", "--port", "7860"]
requirements.in ADDED
@@ -0,0 +1,16 @@
+# requirements.in
+
+pip-tools
+numpy
+chainlit==0.7.700
+openai
+langchain
+langchain-openai
+langchain_core
+langchain-community
+langchainhub
+langchain-qdrant
+ragas
+qdrant-client
+pymupdf
+pandas
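
The pinned requirements.txt below was generated from this file by pip-compile (per its header). A typical local workflow for regenerating it, using standard pip-tools commands (an assumed example, not taken from the commit):

pip install pip-tools                                      # provides the pip-compile and pip-sync commands
pip-compile requirements.in                                # resolve and write pinned requirements.txt
pip-compile --upgrade requirements.in                      # re-resolve everything to the newest allowed versions
pip-compile --upgrade-package langchain requirements.in    # bump just one package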
requirements.txt CHANGED
@@ -1,13 +1,411 @@
-numpy
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    pip-compile requirements.in
+#
+aiofiles==23.2.1
+    # via chainlit
+aiohappyeyeballs==2.4.0
+    # via aiohttp
+aiohttp==3.10.5
+    # via
+    #   datasets
+    #   fsspec
+    #   langchain
+    #   langchain-community
+    #   python-graphql-client
+aiosignal==1.3.1
+    # via aiohttp
+annotated-types==0.7.0
+    # via pydantic
+anyio==3.7.1
+    # via
+    #   asyncer
+    #   httpcore
+    #   openai
+    #   starlette
+    #   watchfiles
+appdirs==1.4.4
+    # via ragas
+asyncer==0.0.2
+    # via chainlit
+attrs==24.2.0
+    # via aiohttp
+bidict==0.23.1
+    # via python-socketio
+build==1.2.2
+    # via pip-tools
+certifi==2024.8.30
+    # via
+    #   httpcore
+    #   httpx
+    #   requests
 chainlit==0.7.700
-openai
-langchain
-langchain-openai
-langchain_core
-langchain-community
-langchainhub
-langchain-qdrant
-ragas
-qdrant-client
-pymupdf
-pandas
+    # via -r requirements.in
+charset-normalizer==3.3.2
+    # via requests
+click==8.1.7
+    # via
+    #   chainlit
+    #   pip-tools
+    #   uvicorn
+dataclasses-json==0.5.14
+    # via
+    #   chainlit
+    #   langchain-community
+datasets==3.0.0
+    # via ragas
+deprecated==1.2.14
+    # via
+    #   opentelemetry-api
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
+    #   opentelemetry-semantic-conventions
+dill==0.3.8
+    # via
+    #   datasets
+    #   multiprocess
+distro==1.9.0
+    # via openai
+fastapi==0.100.1
+    # via
+    #   chainlit
+    #   fastapi-socketio
+fastapi-socketio==0.0.10
+    # via chainlit
+filelock==3.16.1
+    # via
+    #   datasets
+    #   huggingface-hub
+filetype==1.2.0
+    # via chainlit
+frozenlist==1.4.1
+    # via
+    #   aiohttp
+    #   aiosignal
+fsspec[http]==2024.6.1
+    # via
+    #   datasets
+    #   huggingface-hub
+googleapis-common-protos==1.65.0
+    # via
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
+greenlet==3.1.1
+    # via sqlalchemy
+grpcio==1.66.1
+    # via
+    #   grpcio-tools
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   qdrant-client
+grpcio-tools==1.62.3
+    # via qdrant-client
+h11==0.14.0
+    # via
+    #   httpcore
+    #   uvicorn
+    #   wsproto
+h2==4.1.0
+    # via httpx
+hpack==4.0.0
+    # via h2
+httpcore==0.17.3
+    # via httpx
+httpx[http2]==0.24.1
+    # via
+    #   chainlit
+    #   langsmith
+    #   openai
+    #   qdrant-client
+huggingface-hub==0.25.1
+    # via datasets
+hyperframe==6.0.1
+    # via h2
+idna==3.10
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+    #   yarl
+importlib-metadata==8.4.0
+    # via opentelemetry-api
+jiter==0.5.0
+    # via openai
+jsonpatch==1.33
+    # via langchain-core
+jsonpointer==3.0.0
+    # via jsonpatch
+langchain==0.3.0
+    # via
+    #   -r requirements.in
+    #   langchain-community
+    #   ragas
+langchain-community==0.3.0
+    # via
+    #   -r requirements.in
+    #   ragas
+langchain-core==0.3.5
+    # via
+    #   -r requirements.in
+    #   langchain
+    #   langchain-community
+    #   langchain-openai
+    #   langchain-qdrant
+    #   langchain-text-splitters
+    #   ragas
+langchain-openai==0.2.0
+    # via
+    #   -r requirements.in
+    #   ragas
+langchain-qdrant==0.1.4
+    # via -r requirements.in
+langchain-text-splitters==0.3.0
+    # via langchain
+langchainhub==0.1.21
+    # via -r requirements.in
+langsmith==0.1.125
+    # via
+    #   langchain
+    #   langchain-community
+    #   langchain-core
+lazify==0.4.0
+    # via chainlit
+marshmallow==3.22.0
+    # via dataclasses-json
+multidict==6.1.0
+    # via
+    #   aiohttp
+    #   yarl
+multiprocess==0.70.16
+    # via datasets
+mypy-extensions==1.0.0
+    # via typing-inspect
+nest-asyncio==1.6.0
+    # via
+    #   chainlit
+    #   ragas
+numpy==1.26.4
+    # via
+    #   -r requirements.in
+    #   datasets
+    #   langchain
+    #   langchain-community
+    #   pandas
+    #   pyarrow
+    #   qdrant-client
+    #   ragas
+openai==1.47.1
+    # via
+    #   -r requirements.in
+    #   langchain-openai
+    #   ragas
+opentelemetry-api==1.27.0
+    # via
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
+    #   opentelemetry-instrumentation
+    #   opentelemetry-sdk
+    #   opentelemetry-semantic-conventions
+    #   uptrace
+opentelemetry-exporter-otlp==1.27.0
+    # via uptrace
+opentelemetry-exporter-otlp-proto-common==1.27.0
+    # via
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
+opentelemetry-exporter-otlp-proto-grpc==1.27.0
+    # via opentelemetry-exporter-otlp
+opentelemetry-exporter-otlp-proto-http==1.27.0
+    # via opentelemetry-exporter-otlp
+opentelemetry-instrumentation==0.48b0
+    # via uptrace
+opentelemetry-proto==1.27.0
+    # via
+    #   opentelemetry-exporter-otlp-proto-common
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
+opentelemetry-sdk==1.27.0
+    # via
+    #   opentelemetry-exporter-otlp-proto-grpc
+    #   opentelemetry-exporter-otlp-proto-http
+    #   uptrace
+opentelemetry-semantic-conventions==0.48b0
+    # via opentelemetry-sdk
+orjson==3.10.7
+    # via langsmith
+packaging==23.2
+    # via
+    #   build
+    #   chainlit
+    #   datasets
+    #   huggingface-hub
+    #   langchain-core
+    #   langchainhub
+    #   marshmallow
+pandas==2.2.3
+    # via
+    #   -r requirements.in
+    #   datasets
+pip-tools==7.4.1
+    # via -r requirements.in
+portalocker==2.10.1
+    # via qdrant-client
+protobuf==4.25.5
+    # via
+    #   googleapis-common-protos
+    #   grpcio-tools
+    #   opentelemetry-proto
+pyarrow==17.0.0
+    # via datasets
+pydantic==2.9.2
+    # via
+    #   chainlit
+    #   fastapi
+    #   langchain
+    #   langchain-core
+    #   langchain-qdrant
+    #   langsmith
+    #   openai
+    #   pydantic-settings
+    #   qdrant-client
+pydantic-core==2.23.4
+    # via pydantic
+pydantic-settings==2.5.2
+    # via langchain-community
+pyjwt==2.9.0
+    # via chainlit
+pymupdf==1.24.10
+    # via -r requirements.in
+pymupdfb==1.24.10
+    # via pymupdf
+pyproject-hooks==1.1.0
+    # via
+    #   build
+    #   pip-tools
+pysbd==0.3.4
+    # via ragas
+python-dateutil==2.9.0.post0
+    # via pandas
+python-dotenv==1.0.1
+    # via
+    #   chainlit
+    #   pydantic-settings
+python-engineio==4.9.1
+    # via python-socketio
+python-graphql-client==0.4.3
+    # via chainlit
+python-multipart==0.0.6
+    # via chainlit
+python-socketio==5.11.4
+    # via fastapi-socketio
+pytz==2024.2
+    # via pandas
+pyyaml==6.0.2
+    # via
+    #   datasets
+    #   huggingface-hub
+    #   langchain
+    #   langchain-community
+    #   langchain-core
+qdrant-client==1.11.2
+    # via
+    #   -r requirements.in
+    #   langchain-qdrant
+ragas==0.1.19
+    # via -r requirements.in
+regex==2024.9.11
+    # via tiktoken
+requests==2.32.3
+    # via
+    #   datasets
+    #   huggingface-hub
+    #   langchain
+    #   langchain-community
+    #   langchainhub
+    #   langsmith
+    #   opentelemetry-exporter-otlp-proto-http
+    #   python-graphql-client
+    #   tiktoken
+simple-websocket==1.0.0
+    # via python-engineio
+six==1.16.0
+    # via python-dateutil
+sniffio==1.3.1
+    # via
+    #   anyio
+    #   httpcore
+    #   httpx
+    #   openai
+sqlalchemy==2.0.35
+    # via
+    #   langchain
+    #   langchain-community
+starlette==0.27.0
+    # via fastapi
+syncer==2.0.3
+    # via chainlit
+tenacity==8.5.0
+    # via
+    #   langchain
+    #   langchain-community
+    #   langchain-core
+tiktoken==0.7.0
+    # via
+    #   langchain-openai
+    #   ragas
+tomli==2.0.1
+    # via chainlit
+tqdm==4.66.5
+    # via
+    #   datasets
+    #   huggingface-hub
+    #   openai
+types-requests==2.32.0.20240914
+    # via langchainhub
+typing-extensions==4.12.2
+    # via
+    #   fastapi
+    #   huggingface-hub
+    #   langchain-core
+    #   openai
+    #   opentelemetry-sdk
+    #   pydantic
+    #   pydantic-core
+    #   sqlalchemy
+    #   typing-inspect
+typing-inspect==0.9.0
+    # via dataclasses-json
+tzdata==2024.2
+    # via pandas
+uptrace==1.26.0
+    # via chainlit
+urllib3==2.2.3
+    # via
+    #   qdrant-client
+    #   requests
+    #   types-requests
+uvicorn==0.23.2
+    # via chainlit
+watchfiles==0.20.0
+    # via chainlit
+websockets==13.1
+    # via python-graphql-client
+wheel==0.44.0
+    # via pip-tools
+wrapt==1.16.0
+    # via
+    #   deprecated
+    #   opentelemetry-instrumentation
+wsproto==1.2.0
+    # via simple-websocket
+xxhash==3.5.0
+    # via datasets
+yarl==1.12.0
+    # via aiohttp
+zipp==3.20.2
+    # via importlib-metadata
+
+# The following packages are considered to be unsafe in a requirements file:
+# pip
+# setuptools
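
With the lock file in place, the runtime environment can also be reproduced or sanity-checked outside the image; a small sketch using standard pip and pip-tools commands (an assumed example, not part of the commit):

pip install -r requirements.txt   # the same install step the updated Dockerfile runs
pip-sync requirements.txt         # or: make the current virtualenv match the lock file exactly
pip check                         # confirm the installed set has no conflicting dependencies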