ThomasTheMaker committed
Commit afee35a · verified · 1 Parent(s): f997601

Upload model trained with Unsloth 2x faster

Files changed (2)
  1. adapter_config.json +6 -6
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -17,7 +17,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 64,
+  "lora_alpha": 128,
   "lora_bias": false,
   "lora_dropout": 0.05,
   "megatron_config": null,
@@ -25,17 +25,17 @@
   "modules_to_save": null,
   "peft_type": "LORA",
   "qalora_group_size": 16,
-  "r": 32,
+  "r": 64,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "q_proj",
     "up_proj",
+    "q_proj",
     "o_proj",
     "k_proj",
-    "gate_proj",
-    "down_proj"
+    "down_proj",
+    "v_proj",
+    "gate_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4ac31d1c3101b1476f65d846d19f1a52df68274093fb2f6d1fd241631a529fdf
-size 104414464
+oid sha256:7502c0ee260451bd4de410b7e75400a1854907e4e533b877ff6d1810756b1287
+size 208780928
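
The adapter file roughly doubling in size (104414464 -> 208780928 bytes) is consistent with the rank change above, since LoRA parameter count scales linearly with r. A minimal sketch of the arithmetic; the projection shapes below are hypothetical, not taken from this repo.

# LoRA stores A (r x d_in) and B (d_out x r) for each adapted weight,
# i.e. r * (d_in + d_out) parameters, so doubling r doubles adapter size.
def lora_param_count(shapes, r):
    """shapes: list of (d_out, d_in) for each adapted projection."""
    return sum(r * (d_in + d_out) for d_out, d_in in shapes)

# Hypothetical shapes for illustration only:
shapes = [(4096, 4096)] * 4 + [(11008, 4096), (4096, 11008), (11008, 4096)]
assert lora_param_count(shapes, 64) == 2 * lora_param_count(shapes, 32)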