codegood committed on
Commit
505126c
1 Parent(s): 97369d7

Upload 13 files

adapter_config.json CHANGED
@@ -19,10 +19,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "Wqkv",
+    "out_proj",
     "fc2",
     "linear",
-    "out_proj",
+    "Wqkv",
     "fc1"
   ],
   "task_type": "CAUSAL_LM"
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c2f7c3f5cbce07a4c89a776da65470fb7b4aa9e4fd533caac143f8eabf974dff
+oid sha256:0eb24a5ca10d959e04d38fcdc3f316c3ce77ee5dbacbf39790b79106de8f5e30
 size 53764520
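
Both safetensors entries in this commit are git-lfs pointer files: an oid line carrying the SHA-256 of the real file and a size line carrying its byte count. A small sketch for checking a downloaded file against those two fields (file path hypothetical):

# Sketch: verify a downloaded file against the git-lfs pointer shown above.
import hashlib
import os

def matches_lfs_pointer(path: str, expected_sha256: str, expected_size: int) -> bool:
    # The pointer's "size" field is the exact byte count of the real file.
    if os.path.getsize(path) != expected_size:
        return False
    # The pointer's "oid sha256:<hex>" field is the SHA-256 of the file contents.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Values taken from the updated pointer above.
matches_lfs_pointer(
    "adapter_model.safetensors",
    "0eb24a5ca10d959e04d38fcdc3f316c3ce77ee5dbacbf39790b79106de8f5e30",
    53764520,
)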
config.json CHANGED
@@ -4,25 +4,17 @@
   "architectures": [
     "PhiForCausalLM"
   ],
-  "attention_dropout": 0.0,
   "attn_pdrop": 0.0,
   "auto_map": {
     "AutoConfig": "microsoft/phi-1_5--configuration_phi.PhiConfig",
     "AutoModelForCausalLM": "microsoft/phi-1_5--modeling_phi.PhiForCausalLM"
   },
-  "bos_token_id": 1,
   "embd_pdrop": 0.0,
-  "eos_token_id": 2,
   "flash_attn": false,
   "flash_rotary": false,
   "fused_dense": false,
-  "hidden_act": "gelu_new",
-  "hidden_size": 2048,
   "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "layer_norm_eps": 1e-05,
   "layer_norm_epsilon": 1e-05,
-  "max_position_embeddings": 2048,
   "model_type": "phi",
   "n_embd": 2048,
   "n_head": 32,
@@ -30,17 +22,10 @@
   "n_inner": null,
   "n_layer": 24,
   "n_positions": 2048,
-  "num_attention_heads": 32,
-  "num_hidden_layers": 24,
-  "partial_rotary_factor": 0.5,
-  "qk_layernorm": false,
   "resid_pdrop": 0.0,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
   "rotary_dim": 32,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.36.0",
-  "use_cache": true,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.35.2",
   "vocab_size": 51200
 }
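
The config.json edit appears to drop the fields of the transformers-native Phi schema (hidden_size, num_hidden_layers, rope_theta, etc.) and keep only the legacy keys used by the remote code that auto_map points to, with torch_dtype switched to bfloat16 and transformers_version rolled back to 4.35.2. A short sketch for inspecting such a config; the repo id is a placeholder for this model repo:

# Sketch: load and inspect the trimmed config via the remote PhiConfig.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(
    "codegood/<this-model-repo>",  # placeholder; trust_remote_code is needed because of auto_map
    trust_remote_code=True,
)
print(cfg.model_type)   # "phi"
print(cfg.n_embd)       # 2048
print(cfg.n_layer)      # 24
print(cfg.torch_dtype)  # bfloat16 after this commit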
generation_config.json CHANGED
@@ -1,4 +1,4 @@
 {
   "_from_model_config": true,
-  "transformers_version": "4.36.0"
+  "transformers_version": "4.35.2"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:473d391a5be14131396f1ad652d7292ddfa12b275cdfea1804a8b892b9f130f8
-size 2836568312
+oid sha256:8f175cd5282f0055a6925ea500990e1a0b4cd58e823cb9f13125222650537644
+size 2836567400