File size: 405 Bytes
e37abe2 2800109 f3be2ab 83ccd10 f3be2ab 83ccd10 f3be2ab 83ccd10 |
1 2 3 4 5 6 7 8 9 10 11 |
"""Bootstrap script for a Hugging Face Space.

Rebuilds llama-cpp-python with CUDA support, then launches the FastAPI
app (`app:app`) with uvicorn on the Spaces default port 7860.
"""
import os
import subprocess
import sys

# Remove any pre-installed (CPU-only) llama-cpp-python so the CUDA build
# below is not skipped as "requirement already satisfied".
# check=False: uninstall legitimately fails when the package is absent.
# sys.executable -m pip guarantees we target the running interpreter's
# environment rather than whatever `pip` is first on PATH.
subprocess.run(
    [sys.executable, "-m", "pip", "uninstall", "-y", "llama-cpp-python"],
    check=False,
)

# Reinstall llama-cpp-python compiled with CUDA. CMAKE_ARGS is passed via
# the child environment instead of a shell `VAR=... cmd` prefix — portable
# and avoids shell=True string commands (shell-injection-prone idiom).
build_env = {**os.environ, "CMAKE_ARGS": "-DLLAMA_CUDA=on"}
subprocess.run(
    [sys.executable, "-m", "pip", "install", "llama-cpp-python"],
    env=build_env,
    check=True,  # fail loudly if the CUDA build breaks, instead of serving a stale wheel
)

# Start the Hugging Face Space server; check=True surfaces a non-zero exit.
subprocess.run(
    ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"],
    check=True,
)