Try fixing the CMake build flags for the llama-cpp-python CUDA install
Browse files
start.py
CHANGED
@@ -1,11 +1,17 @@
|
|
1 |
import subprocess
|
|
|
2 |
|
3 |
# Uninstall the existing llama-cpp-python package
|
4 |
-
#
|
|
|
5 |
|
6 |
# Install llama-cpp-python with CUDA support
|
7 |
-
install_command = "
|
|
|
|
|
8 |
subprocess.run(install_command, shell=True)
|
9 |
|
10 |
# Start the Hugging Face Space
|
11 |
-
|
|
|
|
|
|
import os
import subprocess
import sys

# Uninstall the existing llama-cpp-python package
# uninstall_command = ["pip", "uninstall", "-y", "llama-cpp-python"]
# subprocess.run(uninstall_command, shell=True)

# Install llama-cpp-python with CUDA support.
#
# NOTE: CMAKE_ARGS / FORCE_CMAKE are *environment variables* read by the
# llama-cpp-python build backend, not pip command-line arguments, so they
# must be passed via `env=` rather than appended to the argv list.
install_command = [
    sys.executable, "-m", "pip", "install",
    "--upgrade", "--force-reinstall", "llama-cpp-python", "--no-cache-dir",
]
build_env = dict(
    os.environ,
    CMAKE_ARGS="-DLLAMA_CUBLAS=on",  # build the CUDA (cuBLAS) backend
    FORCE_CMAKE="1",                 # force a source build through CMake
)
# Use shell=False with an argument list: shell=True combined with a list
# would run only the first element through the shell and silently drop
# every other argument. check=True makes a failed install abort startup
# instead of launching the app against a broken wheel.
subprocess.run(install_command, env=build_env, check=True)

# Start the Hugging Face Space. This blocks for the lifetime of the server;
# the stray duplicate re-install that followed it was dead code and is removed.
uvicorn_command = ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
subprocess.run(uvicorn_command, check=True)