File size: 582 Bytes
e37abe2
2800109
b0c7b3b
9193698
f3be2ab
b081d44
cf0efb2
 
 
1
2
3
4
5
6
7
8
9
"""Startup script: build llama-cpp-python with CUDA support, then launch the app.

Runs two steps, failing fast if either one errors:
  1. Install llama-cpp-python from source with CUDA enabled (cuBLAS/GGML_CUDA).
  2. Start the FastAPI app via uvicorn on 0.0.0.0:7860.
"""

import os
import subprocess
import sys

# commented because the existing llama-cpp-python package was removed from requirements.txt
# subprocess.run("pip uninstall -y llama-cpp-python", shell=True)

# Single source of truth for the CUDA toolkit location (was repeated four times).
CUDA_ROOT = "/usr/local/cuda-12.2"

# Pass build flags through the environment instead of a shell command string;
# this avoids shell quoting pitfalls and lets us use list-form subprocess args.
build_env = os.environ.copy()
build_env["CMAKE_ARGS"] = (
    f"-DGGML_CUDA=on"
    f" -DCUDA_PATH={CUDA_ROOT}"
    f" -DCUDAToolkit_ROOT={CUDA_ROOT}"
    f" -DCUDAToolkit_INCLUDE_DIR={CUDA_ROOT}/include"
    f" -DCUDAToolkit_LIBRARY_DIR={CUDA_ROOT}/lib64"
)
build_env["FORCE_CMAKE"] = "1"

# sys.executable -m pip guarantees we install into the interpreter running this
# script. check=True aborts instead of launching the server on a failed build
# (the original continued silently, starting uvicorn with a broken dependency).
subprocess.run(
    [sys.executable, "-m", "pip", "install", "llama-cpp-python", "--no-cache-dir"],
    env=build_env,
    check=True,
)

# Launch the ASGI app; check=True surfaces a non-zero exit (e.g. port in use).
subprocess.run(
    ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"],
    check=True,
)