Spaces: Running on Zero
Update app.py
app.py
CHANGED
@@ -12,6 +12,7 @@ if not os.path.exists(OLLAMA):
     subprocess.run("curl -L https://ollama.com/download/ollama-linux-amd64 -o ~/ollama", shell=True)
     os.chmod(OLLAMA, 0o755)
 
+@spaces.GPU()
 def ollama_service_thread():
     subprocess.run("~/ollama serve", shell=True)
 
@@ -59,7 +60,7 @@ h3 {
 """
 
 # Remove this if in CPU
-
+
 def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
 
     conversation = []
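The commit puts GPU allocation on the Ollama server process: the first hunk adds the ZeroGPU decorator spaces.GPU() above ollama_service_thread, and the second hunk only swaps the line under the "# Remove this if in CPU" comment ahead of stream_chat. Below is a minimal sketch of how these pieces plausibly fit together in app.py; the OLLAMA constant, the imports, and the threading.Thread launch are assumptions not visible in this diff, while the download, chmod, decorator, and serve calls are taken from it.

import os
import subprocess
import threading

import spaces  # Hugging Face ZeroGPU helper; assumed to already be a dependency of this Space

OLLAMA = os.path.expanduser("~/ollama")  # assumed install path behind the existing exists() check

if not os.path.exists(OLLAMA):
    # Download the Ollama binary and make it executable (as in the diff).
    subprocess.run("curl -L https://ollama.com/download/ollama-linux-amd64 -o ~/ollama", shell=True)
    os.chmod(OLLAMA, 0o755)

@spaces.GPU()  # request a ZeroGPU allocation for the duration of this call
def ollama_service_thread():
    subprocess.run("~/ollama serve", shell=True)

# Assumed launch: run the server in a background thread so the Gradio app
# can keep handling chat requests while Ollama serves the model.
threading.Thread(target=ollama_service_thread, daemon=True).start()

The stream_chat function shown in the second hunk is the Gradio chat handler that takes the sampling parameters (temperature, max_new_tokens, top_p, top_k, penalty) and starts by initializing conversation = []; apart from the single swapped line, this commit leaves it unchanged.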