Spaces:
Sleeping
Sleeping
FantasticGNU
committed on
Commit
•
4223fbb
1
Parent(s):
a7fa8fe
Update model/openllama.py
Browse files- model/openllama.py +5 -5
model/openllama.py
CHANGED
@@ -205,11 +205,11 @@ class OpenLLAMAPEFTModel(nn.Module):
|
|
205 |
target_modules=['q_proj', 'k_proj', 'v_proj', 'o_proj']
|
206 |
)
|
207 |
|
208 |
-
config = AutoConfig.from_pretrained(vicuna_ckpt_path)
|
209 |
-
with init_empty_weights():
|
210 |
-
|
211 |
-
self.llama_model = load_checkpoint_and_dispatch(self.llama_model, vicuna_ckpt_path, device_map="auto", no_split_module_classes=["OPTDecoderLayer"])
|
212 |
-
|
213 |
self.llama_model = get_peft_model(self.llama_model, peft_config)
|
214 |
self.llama_model.print_trainable_parameters()
|
215 |
|
|
|
205 |
target_modules=['q_proj', 'k_proj', 'v_proj', 'o_proj']
|
206 |
)
|
207 |
|
208 |
+
# config = AutoConfig.from_pretrained(vicuna_ckpt_path)
|
209 |
+
# with init_empty_weights():
|
210 |
+
# self.llama_model = AutoModelForCausalLM.from_config(config)
|
211 |
+
# self.llama_model = load_checkpoint_and_dispatch(self.llama_model, vicuna_ckpt_path, device_map="auto", no_split_module_classes=["OPTDecoderLayer"], offload_folder="offload", offload_state_dict = True)
|
212 |
+
self.llama_model = AutoModelForCausalLM.from_pretrained(vicuna_ckpt_path, torch_dtype=torch.float16, device_map='auto', offload_folder="offload", offload_state_dict = True)
|
213 |
self.llama_model = get_peft_model(self.llama_model, peft_config)
|
214 |
self.llama_model.print_trainable_parameters()
|
215 |
|