jtatman committed on
Commit f95286d
1 Parent(s): 22c08c9

Upload GPTNeoXForCausalLM

Files changed (1)
config.json +4 -4
config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "EleutherAI/pythia-125m-deduped",
+ "_name_or_path": "./pythia-125m-gpt4-llm-cleaned",
  "architectures": [
  "GPTNeoXForCausalLM"
  ],
@@ -22,9 +22,9 @@
  "rotary_emb_base": 10000,
  "rotary_pct": 0.25,
  "tie_word_embeddings": false,
- "torch_dtype": "float16",
+ "torch_dtype": "float32",
  "transformers_version": "4.41.2",
- "use_cache": false,
+ "use_cache": true,
  "use_parallel_residual": true,
- "vocab_size": 50304
+ "vocab_size": 50282
  }
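For context, a minimal sketch of loading the checkpoint after this change, assuming the repo id `jtatman/pythia-125m-gpt4-llm-cleaned` (inferred from the new `_name_or_path`, not confirmed by this commit): the updated config stores float32 weights instead of float16, enables the KV cache, and shrinks `vocab_size` from 50304 to 50282, which typically means the embedding matrix was resized to match the fine-tuning tokenizer.

```python
# Sketch only: the repo id below is an assumption inferred from
# "_name_or_path" in this commit, not a confirmed Hub path.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "jtatman/pythia-125m-gpt4-llm-cleaned"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float32,  # matches the new "torch_dtype" in config.json
)

# With "use_cache": true, generate() reuses past key/value states
# instead of recomputing attention over the full prefix each step.
inputs = tokenizer("Hello,", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20, use_cache=True)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

# The resized vocabulary from this commit should be reflected here:
print(model.config.vocab_size)  # 50282 per the updated config
```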