ThisIsATest committed
Commit a3b449c · verified · 1 Parent(s): ffca336

Upload GPTNeoXForCausalLM

Files changed (2)
  1. config.json +5 -5
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "dclm_id_160m/0/final",
+  "_name_or_path": "dclm_id_410m/0/final",
   "architectures": [
     "GPTNeoXForCausalLM"
   ],
@@ -10,14 +10,14 @@
   "eos_token_id": 0,
   "hidden_act": "gelu",
   "hidden_dropout": 0.0,
-  "hidden_size": 768,
+  "hidden_size": 1024,
   "initializer_range": 0.02,
-  "intermediate_size": 3072,
+  "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 2048,
   "model_type": "gpt_neox",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
   "rope_scaling": null,
   "rotary_emb_base": 10000,
   "rotary_pct": 0.25,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:14b27ef98298d20a7ddbebc03dc12cce838649ddde7410f55ef2c8b7ba0fcce3
-size 649308728
+oid sha256:526e0143e5f10d35f5d2a74833b8a5d47441202aaa92cd1892148c439b192799
+size 1621370224
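
The size jump is consistent with the config change. A rough check, assuming fp32 weights (4 bytes per parameter) and ignoring the small safetensors header:

# Rough parameter counts implied by the LFS file sizes, assuming fp32
# checkpoints (4 bytes per weight); the safetensors header is negligible.
for label, size_bytes in [("before", 649_308_728), ("after", 1_621_370_224)]:
    print(f"{label}: ~{size_bytes / 4 / 1e6:.0f}M parameters")
# before: ~162M parameters  (matches the dclm_id_160m path)
# after:  ~405M parameters  (matches the dclm_id_410m path)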