Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -23,16 +23,16 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
|
|
23 |
|
24 |
peft_model_id = "charansr/llama2-7b-chat-hf-therapist"
|
25 |
config = PeftConfig.from_pretrained(peft_model_id,
|
26 |
-
|
27 |
newmodel = AutoModelForCausalLM.from_pretrained(config.base_model_name_or_path, return_dict=True, load_in_8bit=True, device_map='auto',
|
28 |
-
|
29 |
|
30 |
newtokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path,
|
31 |
-
|
32 |
|
33 |
# Load the Lora model
|
34 |
newmodel = PeftModel.from_pretrained(newmodel, peft_model_id,
|
35 |
-
|
36 |
|
37 |
def givetext(input_text,lmodel,ltokenizer):
|
38 |
eval_prompt_pt1 = """\nBelow is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction: Act like a therapist and respond\n\n### Input: """
|
|
|
# --- Module-level model setup ---
#
# SECURITY: the committed version of this hunk embedded a real Hugging Face
# access token ("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") in source, four
# times. Once pushed, that token is public and must be revoked on
# huggingface.co. The credential is now read from the environment (set
# HF_TOKEN as a Spaces secret / env var) instead of being hardcoded.
import os

# None is a valid value for use_auth_token (falls back to anonymous /
# cached-login access), so a missing env var degrades gracefully for
# public repos instead of crashing at import time.
hf_token = os.environ.get("HF_TOKEN")

# LoRA adapter repo fine-tuned for therapist-style responses.
peft_model_id = "charansr/llama2-7b-chat-hf-therapist"

# The adapter config records which base model the LoRA weights target.
config = PeftConfig.from_pretrained(peft_model_id, use_auth_token=hf_token)

# Base causal LM, loaded in 8-bit and sharded across available devices
# (device_map='auto') so a 7B model fits in limited GPU memory.
newmodel = AutoModelForCausalLM.from_pretrained(
    config.base_model_name_or_path,
    return_dict=True,
    load_in_8bit=True,
    device_map='auto',
    use_auth_token=hf_token,
)

newtokenizer = AutoTokenizer.from_pretrained(
    config.base_model_name_or_path,
    use_auth_token=hf_token,
)

# Load the Lora model: wrap the base model with the adapter weights.
newmodel = PeftModel.from_pretrained(newmodel, peft_model_id, use_auth_token=hf_token)
|
36 |
|
37 |
def givetext(input_text,lmodel,ltokenizer):
|
38 |
eval_prompt_pt1 = """\nBelow is an instruction that describes a task. Write a response that appropriately completes the request.\n### Instruction: Act like a therapist and respond\n\n### Input: """
|