from huggingface_hub import hf_hub_download


def download_models(huggingface_token):
    # (repo_id, filename) pairs for each quantized GGUF model to fetch
    models = [
        ("bartowski/Meta-Llama-3.1-8B-Instruct-GGUF", "Meta-Llama-3.1-8B-Instruct-Q5_K_M.gguf"),
        ("bartowski/Mistral-Nemo-Instruct-2407-GGUF", "Mistral-Nemo-Instruct-2407-Q5_K_M.gguf"),
        ("bartowski/gemma-2-2b-it-GGUF", "gemma-2-2b-it-Q6_K_L.gguf"),
        ("bartowski/openchat-3.6-8b-20240522-GGUF", "openchat-3.6-8b-20240522-Q6_K.gguf"),
        ("bartowski/Llama-3-Groq-8B-Tool-Use-GGUF", "Llama-3-Groq-8B-Tool-Use-Q6_K.gguf"),
        ("bartowski/MiniCPM-V-2_6-GGUF", "MiniCPM-V-2_6-Q6_K.gguf"),
        ("CaioXapelaum/Llama-3.1-Storm-8B-Q5_K_M-GGUF", "llama-3.1-storm-8b-q5_k_m.gguf"),
        ("CaioXapelaum/Orca-2-7b-Patent-Instruct-Llama-2-Q5_K_M-GGUF", "orca-2-7b-patent-instruct-llama-2-q5_k_m.gguf"),
    ]
    # Download each file into ./models, authenticating with the provided token
    for repo_id, filename in models:
        hf_hub_download(
            repo_id=repo_id,
            filename=filename,
            local_dir="./models",
            token=huggingface_token,
        )
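
# A minimal sketch of how this helper might be invoked; the HF_TOKEN
# environment variable name is an assumption, not part of the original
# snippet -- supply your Hugging Face token however your setup provides it.
import os

if __name__ == "__main__":
    download_models(os.environ.get("HF_TOKEN"))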