Update config.json
config.json  +1 -2

@@ -12,12 +12,12 @@
   "dynamic_image_size": true,
   "force_image_size": 448,
   "llm_config": {
-    "_attn_implementation_autoset": true,
     "_name_or_path": "/tmp/huggingface_cache/Qwen3-1.7B-Instruct",
     "add_cross_attention": false,
     "architectures": [
       "Qwen3ForCausalLM"
     ],
+    "attn_implementation": "flash_attention_2",
     "attention_bias": false,
     "attention_dropout": 0.0,
     "bad_words_ids": null,
@@ -110,7 +110,6 @@
   "use_llm_lora": 0,
   "use_thumbnail": true,
   "vision_config": {
-    "_attn_implementation_autoset": true,
     "_name_or_path": "",
     "add_cross_attention": false,
     "architectures": [
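For reference, a minimal sketch (not part of this commit) of how the updated config.json can be checked after downloading the repository; the file path is assumed to be a local copy of this repo's config.json:

import json

# Load the local copy of the updated config.json (the path is an assumption).
with open("config.json") as f:
    cfg = json.load(f)

# After this commit the LLM sub-config names its attention backend explicitly,
# and the autoset flags are gone from both sub-configs.
assert cfg["llm_config"]["attn_implementation"] == "flash_attention_2"
assert "_attn_implementation_autoset" not in cfg["llm_config"]
assert "_attn_implementation_autoset" not in cfg["vision_config"]
print("config.json requests flash_attention_2 for the language model")

When loading the model with the transformers library, the backend can also be requested explicitly by passing attn_implementation="flash_attention_2" to from_pretrained; using that backend requires the flash-attn package and a compatible GPU.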