Abhaykoul committed
Commit 4b00003 • 1 Parent(s): 3450842

Update README.md

Files changed (1): README.md +36 -0
README.md CHANGED
@@ -77,7 +77,43 @@ inputs = tokenizer(prompt, return_tensors="pt", return_attention_mask=False).to(
  # Here comes the fun part! Let's unleash the power of HelpingAI-3B to generate some awesome text
  generated_text = model.generate(**inputs, max_length=3084, top_p=0.95, do_sample=True, temperature=0.6, use_cache=True, streamer=streamer)
  ```
+ *Using this model directly from GGUF*

+ ```python
+ %pip install -U 'webscout[local]'
+
+ from webscout.Local.utils import download_model
+ from webscout.Local.model import Model
+ from webscout.Local.thread import Thread
+ from webscout.Local import formats
+ from webscout.Local.samplers import SamplerSettings
+
+
+ # 1. Download the model
+ repo_id = "OEvortex/HelpingAI-3B-chat"
+ filename = "helpingai-3b-chat-q4_k_m.gguf"
+ model_path = download_model(repo_id, filename, token='')  # Replace with your Hugging Face token, or leave it empty
+
+ # 2. Load the model
+ model = Model(model_path, n_gpu_layers=20)
+
+ # 3. Define your system prompt
+ system_prompt = "You are HelpingAI a emotional AI always answer my question in HelpingAI style and to the point"
+
+ # 4. Create a custom chatml format with your system prompt
+ custom_chatml = formats.chatml.copy()
+ custom_chatml['system_content'] = system_prompt
+
+ # 5. Define your sampler settings (optional)
+ sampler = SamplerSettings(temp=0.7, top_p=0.9)  # Adjust these values as needed
+
+ # 6. Create a Thread with the custom format and sampler
+ thread = Thread(model, custom_chatml, sampler=sampler)
+
+ # 7. Start interacting with the model
+ thread.interact(header="🌟 HelpingAI-3B-chat: Emotionally Intelligent SLM 🚀", color=True)
+
+ ```
  ## Example Dialogue
  > Express joy and excitement about visiting a new place.
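
For readers who prefer not to go through the `webscout.Local` wrapper, a minimal sketch of running the same GGUF file with llama-cpp-python is shown below. This is not part of the commit above; it assumes `llama-cpp-python` and `huggingface_hub` are installed, and the `n_gpu_layers`/`n_ctx` values are illustrative.

```python
# Minimal sketch (not from the original README): load the same GGUF file
# with llama-cpp-python instead of the webscout.Local wrapper.
# Assumes: pip install llama-cpp-python huggingface_hub
from llama_cpp import Llama

# Download and load the quantized model directly from the Hugging Face repo
llm = Llama.from_pretrained(
    repo_id="OEvortex/HelpingAI-3B-chat",
    filename="helpingai-3b-chat-q4_k_m.gguf",
    n_gpu_layers=20,  # layers offloaded to GPU; set 0 for CPU-only
    n_ctx=2048,       # context window size (assumed value)
)

# Chat-style generation with roughly the same sampling settings as above
response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are HelpingAI a emotional AI always answer my question in HelpingAI style and to the point"},
        {"role": "user", "content": "Express joy and excitement about visiting a new place."},
    ],
    temperature=0.7,
    top_p=0.9,
)
print(response["choices"][0]["message"]["content"])
```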