Commit d4dddf2 · verified · 1 parent: bfa2cc7
zhixuan-lin committed

Upload TransformerForCausalLM

Files changed (2):
  1. config.json +7 -16
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,41 +1,32 @@
 {
-  "_name_or_path": "/network/scratch/z/zhixuan.lin/linear-rnn-torch/release/fox-pro-760m-longcrawl64-48b",
+  "_name_or_path": "/network/scratch/z/zhixuan.lin/linear-rnn-torch/release/transformer-llama-760m-longcrawl64-48b",
   "architectures": [
-    "ForgettingTransformerForCausalLM"
+    "TransformerForCausalLM"
   ],
   "attention_bias": false,
   "bos_token_id": null,
-  "decay_time_max": null,
-  "decay_time_min": null,
   "elementwise_affine": true,
   "eos_token_id": null,
-  "fgate_bias_init": false,
-  "fgate_type": "full",
   "fuse_cross_entropy": true,
   "fuse_norm": true,
   "hidden_act": "swish",
-  "hidden_ratio": 3.5,
+  "hidden_ratio": 4,
   "hidden_size": 1536,
   "initializer_range": 0.02,
   "intermediate_size": null,
   "max_position_embeddings": null,
-  "model_type": "forgetting_transformer-project_fox",
+  "model_type": "transformer-project_fox",
   "norm_eps": 1e-06,
-  "num_heads": 24,
+  "num_heads": 12,
   "num_hidden_layers": 24,
   "num_kv_heads": null,
-  "ogate_act": "sigmoid",
-  "qk_norm": true,
+  "qk_norm": false,
   "rope_base": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
   "transformers_version": "4.44.0",
   "use_cache": true,
-  "use_k_shift": true,
-  "use_output_gate": true,
-  "use_output_norm": true,
-  "use_rope": false,
-  "use_v_shift": true,
+  "use_rope": true,
   "vocab_size": 50257,
   "window_size": null
 }
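
The new config describes a plain LLaMA-style Transformer baseline rather than the Forgetting Transformer: the forget-gate, output-gate, and shift options are removed, RoPE is enabled, and the head count drops from 24 to 12. Because "transformer-project_fox" is a custom model_type that stock transformers does not know, loading it presumably requires the project's own package to register the classes first. A minimal sketch, assuming a hypothetical package import and that the Hub repo id matches the uploaded checkpoint:

import forgetting_transformer  # hypothetical package name (assumption); must
                               # register the custom config/model classes with
                               # the transformers Auto* machinery on import
from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id, inferred from "_name_or_path" above (assumption).
repo_id = "zhixuan-lin/transformer-llama-760m-longcrawl64-48b"

config = AutoConfig.from_pretrained(repo_id)
# Values taken from the diff above.
print(config.num_heads, config.hidden_size, config.num_hidden_layers)  # 12 1536 24

model = AutoModelForCausalLM.from_pretrained(repo_id)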
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1eece24e2dcee0c560f576273590ff77ed93eadccfa045270ef514abe5e58124
-size 3346590184
+oid sha256:e0c8ff874672c54f6a27cd7f3d5ca8d060e6c115b5be38b95c7a7ef8c71b5eb1
+size 3335789480
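
The safetensors entry is a Git LFS pointer, so only the blob's SHA-256 and byte size change here. A quick sketch for checking a locally downloaded file against the new pointer (the local path is an assumption):

import hashlib
import os

path = "model.safetensors"  # hypothetical local path (assumption)

# Recompute the two fields stored in the LFS pointer: SHA-256 and size.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print(h.hexdigest())          # expect e0c8ff874672c54f...c7a7ef8c71b5eb1
print(os.path.getsize(path))  # expect 3335789480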