ghh001 committed on
Commit
9b203c3
1 Parent(s): 75b3741

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -2
README.md CHANGED
@@ -106,7 +106,7 @@ from transformers import (
106
  GenerationConfig
107
  )
108
  from peft import PeftModel
109
-
110
  model_path = 'meta-llama/Llama-2-13b-chat-hf'
111
  lora_path = 'zjunlp/llama2-13b-iepile-lora'
112
  config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)
@@ -131,7 +131,7 @@ system_prompt = "<<SYS>>\nYou are a helpful, respectful and honest assistant. Al
131
  sintruct = "{\"instruction\": \"You are an expert in named entity recognition. Please extract entities that match the schema definition from the input. Return an empty list if the entity type does not exist. Please respond in the format of a JSON string.\", \"schema\": [\"person\", \"organization\", \"else\", \"location\"], \"input\": \"284 Robert Allenby ( Australia ) 69 71 71 73 , Miguel Angel Martin ( Spain ) 75 70 71 68 ( Allenby won at first play-off hole )\"}"
132
  sintruct = '[INST] ' + system_prompt + sintruct + ' [/INST]'
133
 
134
- input_ids = tokenizer.encode(sintruct, return_tensors="pt")
135
  input_length = input_ids.size(1)
136
  generation_output = model.generate(input_ids=input_ids, generation_config=GenerationConfig(max_length=512, max_new_tokens=256, return_dict_in_generate=True))
137
  generation_output = generation_output.sequences[0]
 
106
  GenerationConfig
107
  )
108
  from peft import PeftModel
109
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
110
  model_path = 'meta-llama/Llama-2-13b-chat-hf'
111
  lora_path = 'zjunlp/llama2-13b-iepile-lora'
112
  config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)
 
131
  sintruct = "{\"instruction\": \"You are an expert in named entity recognition. Please extract entities that match the schema definition from the input. Return an empty list if the entity type does not exist. Please respond in the format of a JSON string.\", \"schema\": [\"person\", \"organization\", \"else\", \"location\"], \"input\": \"284 Robert Allenby ( Australia ) 69 71 71 73 , Miguel Angel Martin ( Spain ) 75 70 71 68 ( Allenby won at first play-off hole )\"}"
132
  sintruct = '[INST] ' + system_prompt + sintruct + ' [/INST]'
133
 
134
+ input_ids = tokenizer.encode(sintruct, return_tensors="pt").to(device)
135
  input_length = input_ids.size(1)
136
  generation_output = model.generate(input_ids=input_ids, generation_config=GenerationConfig(max_length=512, max_new_tokens=256, return_dict_in_generate=True))
137
  generation_output = generation_output.sequences[0]