# PMAlpha / start.py
import subprocess

# Optional step (currently disabled): uninstall any existing llama-cpp-python build before reinstalling.
# subprocess.run("pip uninstall -y llama-cpp-python", shell=True)

# Optional step (currently disabled): install llama-cpp-python compiled with CUDA support.
# Replace /path/to/cuda with the actual CUDA toolkit location in the Space container.
# install_command = "CMAKE_ARGS='-DLLAMA_CUDA=on -DCUDA_PATH=/path/to/cuda -DCUDAToolkit_ROOT=/path/to/cuda -DCUDAToolkit_INCLUDE_DIR=/path/to/cuda/include -DCUDAToolkit_LIBRARY_DIR=/path/to/cuda/lib64' pip install llama-cpp-python"
# subprocess.run(install_command, shell=True)
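
# A minimal sketch (not part of the original script, and never called here) of how the
# commented-out CUDA install above could be wrapped with explicit error handling.
# The helper name, the cuda_root parameter, and the use of sys.executable are
# assumptions for illustration; the CMAKE_ARGS flags are the ones shown above.
import os
import sys

def install_llama_cpp_with_cuda(cuda_root: str) -> None:
    """Reinstall llama-cpp-python built against the CUDA toolkit at cuda_root."""
    env = os.environ.copy()
    env["CMAKE_ARGS"] = (
        f"-DLLAMA_CUDA=on -DCUDA_PATH={cuda_root} "
        f"-DCUDAToolkit_ROOT={cuda_root} "
        f"-DCUDAToolkit_INCLUDE_DIR={cuda_root}/include "
        f"-DCUDAToolkit_LIBRARY_DIR={cuda_root}/lib64"
    )
    # check=True makes the script fail loudly if the build or install step fails.
    subprocess.run(
        [sys.executable, "-m", "pip", "install", "--force-reinstall", "llama-cpp-python"],
        env=env,
        check=True,
    )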
# Start the Hugging Face Space: serve the ASGI app `app` from app.py on port 7860,
# the port Hugging Face Spaces expects the web server to listen on.
subprocess.run("uvicorn app:app --host 0.0.0.0 --port 7860", shell=True)