File size: 562 Bytes
e37abe2
2800109
f3be2ab
 
 
 
d841c73
 
b1f117d
f3be2ab
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
"""Bootstrap script for a Hugging Face Space.

Reinstalls llama-cpp-python built with CUDA support, then launches the
FastAPI app under uvicorn. Runs top to bottom with side effects only.
"""

import os
import subprocess
import sys

# Invoke pip via the current interpreter so we modify THIS environment,
# not whatever `pip` happens to resolve to on PATH.
PIP = [sys.executable, "-m", "pip"]

# Uninstall any existing llama-cpp-python build. Best-effort: no check=True,
# since the package may simply not be installed yet (original behavior).
subprocess.run([*PIP, "uninstall", "-y", "llama-cpp-python"])

# Install llama-cpp-python compiled with CUDA (cuBLAS) support.
# CMAKE_ARGS is passed through the environment instead of a shell string,
# so no shell=True is needed anywhere.
# NOTE(review): the /path/to/cuda placeholders must be replaced with the
# real CUDA toolkit location before this install can succeed — confirm.
build_env = os.environ.copy()
build_env["CMAKE_ARGS"] = (
    "-DLLAMA_CUBLAS=on "
    "-DCUDA_PATH=/path/to/cuda "
    "-DCUDAToolkit_ROOT=/path/to/cuda "
    "-DCUDAToolkit_INCLUDE_DIR=/path/to/cuda/include "
    "-DCUDAToolkit_LIBRARY_DIR=/path/to/cuda/lib64"
)
# check=True: if the build/install fails we must not start the server
# against a missing or CPU-only wheel.
subprocess.run([*PIP, "install", "llama-cpp-python"], check=True, env=build_env)

# Start the Hugging Face Space; this call blocks until the server exits.
subprocess.run(
    ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"],
    check=True,
)