{
  "_name_or_path": "/root/.cache/huggingface/hub/models--bigcode--starcoder2-7b/snapshots/bb9afde76d7945da5745592525db122d4d729eb1",
  "activation_function": "gelu",
  "architectures": [
    "Starcoder2ForCausalLM"
  ],
  "attention_dropout": 0.1,
  "attention_softmax_in_fp32": true,
  "bos_token_id": 0,
  "compression_config": {
    "config_groups": {
      "group_0": {
        "input_activations": {
          "block_structure": null,
          "dynamic": true,
          "group_size": null,
          "num_bits": 8,
          "observer": "memoryless",
          "observer_kwargs": {},
          "strategy": "token",
          "symmetric": true,
          "type": "int"
        },
        "output_activations": null,
        "targets": [
          "Linear"
        ],
        "weights": {
          "block_structure": null,
          "dynamic": false,
          "group_size": null,
          "num_bits": 8,
          "observer": "minmax",
          "observer_kwargs": {},
          "strategy": "channel",
          "symmetric": true,
          "type": "int"
        }
      }
    },
    "format": "int-quantized",
    "global_compression_ratio": 1.229096047677562,
    "ignore": [
      "lm_head"
    ],
    "kv_cache_scheme": null,
    "quant_method": "compressed-tensors",
    "quantization_status": "frozen",
    "sparsity_config": {
      "format": "dense",
      "global_sparsity": 1.3962297793225527,
      "registry_requires_subclass": false,
      "sparsity_structure": "unstructured"
    }
  },
  "embedding_dropout": 0.1,
  "eos_token_id": 0,
  "hidden_act": "gelu_pytorch_tanh",
  "hidden_size": 4608,
  "initializer_range": 0.018042,
  "intermediate_size": 18432,
  "layer_norm_epsilon": 1e-05,
  "max_position_embeddings": 16384,
  "mlp_type": "default",
  "model_type": "starcoder2",
  "norm_epsilon": 1e-05,
  "norm_type": "layer_norm",
  "num_attention_heads": 36,
  "num_hidden_layers": 32,
  "num_key_value_heads": 4,
  "residual_dropout": 0.1,
  "rope_theta": 1000000,
  "scale_attention_softmax_in_fp32": true,
  "scale_attn_weights": true,
  "sliding_window": 4096,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.43.3",
  "use_bias": true,
  "use_cache": true,
  "vocab_size": 49152
}
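
The compression_config above describes a W8A8 scheme: static per-channel symmetric int8 weights and dynamic per-token int8 activations on every Linear layer, with lm_head left unquantized. As a minimal sketch of how a checkpoint carrying this config might be consumed: with a recent transformers release that supports compressed-tensors checkpoints (roughly 4.45 or later) and the compressed-tensors package installed, the "quant_method": "compressed-tensors" entry is detected automatically, so no extra quantization arguments are needed at load time. The local path below is hypothetical, not taken from the config.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical local directory holding this config.json plus the compressed weights.
model_path = "./starcoder2-7b-int8"

tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype="auto",  # honors "torch_dtype": "bfloat16" from the config
    device_map="auto",   # requires the accelerate package
)

prompt = "def fibonacci(n):"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))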