Sergidev committed on
Commit
19d4d38
1 Parent(s): e838a30

Fix Mlock

Files changed (1) hide show
  1. modules/pmbl.py +2 -2
modules/pmbl.py CHANGED
@@ -102,7 +102,7 @@ class PMBL:
102
  yield chunk
103
 
104
  def generate_response_task(self, system_prompt, prompt, n_ctx):
105
- llm = Llama(model_path=self.model_path, n_ctx=n_ctx, n_threads=8, mlock=true)
106
 
107
  response = llm(
108
  system_prompt,
@@ -148,7 +148,7 @@ class PMBL:
148
  conn.close()
149
 
150
  def generate_topic(self, prompt, response):
151
- llm = Llama(model_path=self.model_path, n_ctx=1690, n_threads=8, mlock=true)
152
 
153
  system_prompt = f"Based on the following interaction between a user and an AI assistant, generate a concise topic for the conversation in 2-4 words:\n\nUser: {prompt}\nAssistant: {response}\n\nTopic:"
154
 
 
102
  yield chunk
103
 
104
  def generate_response_task(self, system_prompt, prompt, n_ctx):
105
+ llm = Llama(model_path=self.model_path, n_ctx=n_ctx, n_threads=8, mlock=True)
106
 
107
  response = llm(
108
  system_prompt,
 
148
  conn.close()
149
 
150
  def generate_topic(self, prompt, response):
151
+ llm = Llama(model_path=self.model_path, n_ctx=1690, n_threads=8, mlock=True)
152
 
153
  system_prompt = f"Based on the following interaction between a user and an AI assistant, generate a concise topic for the conversation in 2-4 words:\n\nUser: {prompt}\nAssistant: {response}\n\nTopic:"
154