Spaces: Build error
Commit: Yeahhh
- Dockerfile +62 -0
- samsapp.py +33 -0
Dockerfile
ADDED
@@ -0,0 +1,62 @@
#
# Python Jupyter notebooks for Artificial Intelligence (AI) / Machine Learning (ML) on Ubuntu 22.04 LTS
#
# Source: https://github.com/machine-learning-helpers/docker-python-jupyter/blob/master/ubuntu2204/Dockerfile
#
# References:
# * ML specific Dockerfile: https://github.com/machine-learning-helpers/docker-python-jupyter/tree/master/ubuntu2204
# * ML specific images on Docker Hub: https://hub.docker.com/repository/docker/infrahelpers/python-jupyter
# * C++/Python generic Dockerfile: https://github.com/cpp-projects-showcase/docker-images/tree/master/ubuntu2204
# * C++/Python generic images on Docker Hub: https://hub.docker.com/repository/docker/infrahelpers/cpppython
#
FROM infrahelpers/cpppython:ubuntu2204

LABEL authors="Denis Arnaud <denis.arnaud_github at m4x dot org>"

LABEL version="0.1"

# Configuration
ENV container docker
ENV HOME /home/build
ENV LANGUAGE en_US:en
ENV LANG en_US.UTF-8
ENV LC_ALL $LANG

# Create the top directories to host the notebook and data samples
USER root
RUN mkdir -p /notebook /data && chown build.build /notebook /data

# Switch to the `build` user
USER build

# Python
ENV PATH $HOME/.pyenv/bin:$HOME/.pyenv/shims:$PATH

# Download some sample notebooks and data sets
WORKDIR /notebook
RUN git clone https://github.com/machine-learning-helpers/induction-python.git .
WORKDIR /data
RUN git clone https://github.com/machine-learning-helpers/data-samples.git .

# The base directory is where the Jupyter notebook samples have been downloaded.
# However, that directory may be overshadowed by the one the user running this
# Docker image may wish to mount instead.
# There is content (which can be overshadowed) in that directory just in case
# the user does not want to mount her/his own Jupyter notebooks.
# The same comment applies to the content of the /data directory.
VOLUME /notebook
WORKDIR /notebook

# Install the Python dependencies, including Jupyter
RUN pipenv install

# Tell Docker about the Jupyter port and the API port
EXPOSE 8088
EXPOSE 1140

# Copy the API application and its dependency list into the image
# (both files are assumed to sit next to this Dockerfile in the build context)
COPY requirements.txt samsapp.py ./
RUN pip install -r requirements.txt

# Launch the API in the background, then Jupyter.
# A background process started from a RUN instruction does not survive into
# the running container, so samsapp.py has to be started from the CMD instead.
CMD python samsapp.py & pipenv run jupyter lab --allow-root --no-browser --ip 0.0.0.0 --IdentityProvider.token=
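The Dockerfile above installs from a requirements.txt that is not part of this commit, so the pip install -r requirements.txt step is the most likely cause of the build error. A minimal sketch of that file, assuming the imports in samsapp.py below are the only extra dependencies (the package list and the absence of version pins are assumptions):

flask
llama-cpp-python
huggingface-hub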
samsapp.py
ADDED
@@ -0,0 +1,33 @@
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
from flask import Flask, request, jsonify

app = Flask(__name__)

# Download the quantized Phi-2 model and load it with llama.cpp
hf_hub_download("TheBloke/phi-2-GGUF", "phi-2.Q8_0.gguf", local_dir="./")
phi = Llama(model_path="./phi-2.Q8_0.gguf", n_ctx=2048, n_gpu_layers=999)

@app.route("/spaces/MrOvkill/phastfi/", methods=["GET"])
def index():
    return "<html><body><h1>Use API</h1><p>Use /spaces/MrOvkill/phastfi/generate as POST with a prompt in a JSON query.</p></body></html>"

@app.route("/spaces/MrOvkill/phastfi/generate", methods=["POST"])
def completion():
    prompt = request.json["prompt"]
    # As far as llama-cpp-python is concerned, a call returns a single choice
    # (there is no num_completions argument), so the two responses are
    # generated with two separate calls.
    responses = []
    for _ in range(2):
        res = phi(
            prompt,
            temperature=0.33,
            top_p=0.95,
            top_k=42,
            max_tokens=1024,
        )
        responses.append(res["choices"][0]["text"])
    return jsonify({
        "responses": responses
    })

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=1140)
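For reference, a minimal sketch of how a client could call the API once the container is running. The requests library and the localhost host name are assumptions; the route, the port and the "prompt"/"responses" JSON keys come from samsapp.py above.

import requests

# Hypothetical host; adjust to wherever the container is reachable.
url = "http://localhost:1140/spaces/MrOvkill/phastfi/generate"

# POST a JSON body with a "prompt" key, as expected by the /generate route.
resp = requests.post(url, json={"prompt": "Explain what a Dockerfile is in one sentence."})
resp.raise_for_status()

# The route returns {"responses": [...]} with the generated completions.
for text in resp.json()["responses"]:
    print(text.strip())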