TransformerAnalyzer / model_util.py
import requests


def fetch_dictionary_content(model_id):
    # Download a model's config.json from the Hugging Face Hub and return it
    # as a Python dictionary, or None if the request fails.
    MODEL_URL = "https://huggingface.co/{model_id}/raw/main/config.json"
    response = requests.get(MODEL_URL.format(model_id=model_id))
    # Check if the request was successful
    if response.status_code == 200:
        return response.json()  # Parse the JSON content into a Python dictionary
    else:
        return None


def load_parameter(model_dict, cand_keys):
    # Return the value of the first candidate key found in the config dict;
    # fall back to 0 if none of the candidate keys are present.
    for k in cand_keys:
        if k in model_dict:
            return model_dict[k]
    return 0
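

# Example usage: a minimal sketch, not part of the original module. The model id
# and candidate key lists below are illustrative assumptions chosen because
# different architectures name the same hyperparameter differently in config.json.
if __name__ == "__main__":
    config = fetch_dictionary_content("gpt2")
    if config is not None:
        hidden_size = load_parameter(config, ["hidden_size", "n_embd", "d_model"])
        num_layers = load_parameter(config, ["num_hidden_layers", "n_layer", "num_layers"])
        print(f"hidden_size={hidden_size}, num_layers={num_layers}")
    else:
        print("Failed to fetch config.json for the requested model id.")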