SreyanG-NVIDIA committed on
Commit
f41a1c2
·
verified ·
1 Parent(s): e542af3

Upload AudioFlamingo3ForConditionalGeneration

Browse files
Files changed (2) hide show
  1. config.json +2 -3
  2. generation_config.json +1 -3
config.json CHANGED
@@ -7,7 +7,6 @@
7
  "activation_function": "gelu",
8
  "attention_dropout": 0.0,
9
  "dropout": 0.0,
10
- "dtype": "bfloat16",
11
  "hidden_size": 1280,
12
  "initializer_range": 0.02,
13
  "intermediate_size": 5120,
@@ -26,8 +25,7 @@
26
  "projector_hidden_act": "gelu",
27
  "text_config": {
28
  "attention_dropout": 0.0,
29
- "bos_token_id": 151643,
30
- "dtype": "bfloat16",
31
  "eos_token_id": 151645,
32
  "hidden_act": "silu",
33
  "hidden_size": 3584,
@@ -70,6 +68,7 @@
70
  "num_attention_heads": 28,
71
  "num_hidden_layers": 28,
72
  "num_key_value_heads": 4,
 
73
  "rms_norm_eps": 1e-06,
74
  "rope_parameters": {
75
  "rope_theta": 1000000.0,
 
7
  "activation_function": "gelu",
8
  "attention_dropout": 0.0,
9
  "dropout": 0.0,
 
10
  "hidden_size": 1280,
11
  "initializer_range": 0.02,
12
  "intermediate_size": 5120,
 
25
  "projector_hidden_act": "gelu",
26
  "text_config": {
27
  "attention_dropout": 0.0,
28
+ "bos_token_id": 151670,
 
29
  "eos_token_id": 151645,
30
  "hidden_act": "silu",
31
  "hidden_size": 3584,
 
68
  "num_attention_heads": 28,
69
  "num_hidden_layers": 28,
70
  "num_key_value_heads": 4,
71
+ "pad_token_id": 151671,
72
  "rms_norm_eps": 1e-06,
73
  "rope_parameters": {
74
  "rope_theta": 1000000.0,
generation_config.json CHANGED
@@ -1,8 +1,6 @@
1
  {
2
  "bos_token_id": 151670,
3
- "eos_token_id": [
4
- 151645
5
- ],
6
  "max_new_tokens": 2048,
7
  "pad_token_id": 151671,
8
  "transformers_version": "5.0.0.dev0"
 
1
  {
2
  "bos_token_id": 151670,
3
+ "eos_token_id": 151645,
 
 
4
  "max_new_tokens": 2048,
5
  "pad_token_id": 151671,
6
  "transformers_version": "5.0.0.dev0"