Update README.md
README.md CHANGED
@@ -35,13 +35,13 @@ This is the model card of a 🤗 transformers model that has been pushed on the
 
 ## Uses
 
-
+``` python
 
 import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
 import re
 
-
+
 
 model_id = "jaeyoungk/albatross"
 bnb_config = BitsAndBytesConfig(
@@ -54,7 +54,7 @@ bnb_config = BitsAndBytesConfig(
 tokenizer = AutoTokenizer.from_pretrained('meta-llama/Meta-Llama-3-8B-Instruct')
 model = AutoModelForCausalLM.from_pretrained(model_id, quantization_config=bnb_config, device_map='auto')
 
-
+
 
 def gen(x):
     system_prompt = f"""
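
The hunks above only show part of the README snippet: the `BitsAndBytesConfig` arguments (original lines 48-53) and the body of `gen()` fall outside the diff. For context, here is a minimal, self-contained sketch of the same loading pattern. The 4-bit quantization values and the prompt/generation code are assumptions for illustration, not the README's exact values.

```python
# Minimal sketch of the usage pattern the README documents.
# The BitsAndBytesConfig values and the generation call are assumed
# placeholders; the README's actual arguments lie outside the diff hunks.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig

model_id = "jaeyoungk/albatross"

# Assumed 4-bit NF4 quantization settings (hypothetical, for illustration).
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

# Tokenizer comes from the base Llama 3 Instruct checkpoint, weights from the fine-tune.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=bnb_config,
    device_map="auto",
)

def gen(x: str) -> str:
    """Generate a reply for user input `x` (the README's system prompt is not shown here)."""
    messages = [{"role": "user", "content": x}]
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    output = model.generate(input_ids, max_new_tokens=256, do_sample=False)
    # Decode only the newly generated tokens.
    return tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)
```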