ehristoforu committed
Commit ee57d03 · verified · Parent: 095b499
.gitattributes CHANGED
@@ -34,4 +34,3 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 tokenizer.json filter=lfs diff=lfs merge=lfs -text
-assets/recipe.png filter=lfs diff=lfs merge=lfs -text
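The removed rule means assets/recipe.png is no longer routed through the Git LFS filter. A minimal sketch of inspecting such .gitattributes rules from Python; the helper names are illustrative and fnmatch is only an approximation of gitattributes glob semantics:

```python
from fnmatch import fnmatch
from pathlib import Path

def lfs_patterns(gitattributes_path=".gitattributes"):
    """Collect glob patterns whose attributes include filter=lfs."""
    patterns = []
    for line in Path(gitattributes_path).read_text().splitlines():
        parts = line.split()
        if parts and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])
    return patterns

def tracked_by_lfs(path, patterns):
    """Rough check: does the repo-relative path match any LFS pattern?"""
    return any(fnmatch(path, p) or fnmatch(Path(path).name, p) for p in patterns)

pats = lfs_patterns()
print(tracked_by_lfs("tokenizer.json", pats))     # True in this repo
print(tracked_by_lfs("assets/recipe.png", pats))  # False after this commit
```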
config.json CHANGED
@@ -63,7 +63,7 @@
   "tie_word_embeddings": true,
   "torch_dtype": "float16",
   "transformers_version": "4.55.4",
-  "unsloth_version": "2025.9.1",
+  "unsloth_version": "2025.9.2",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fbd20ac444089473d50daebe65ec7a142e127755abd7cdc5e6e5a16611bf48e1
+oid sha256:020a24bd2797e9656763dd3e5724d1354d96e4528376d586286f1524ab42d981
 size 4990818520
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:892346133e2137a9fc023c700e4f7ddb528e5d44808709257882d396b2569ff2
+oid sha256:c01cb1afdef84ee749a574801efeb1bf6c9648736d9218bfbd079739930da49a
 size 3054163080
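Both shard sizes are unchanged; only the LFS object IDs (the SHA-256 of each shard's contents) differ, so the weights themselves were re-exported. A minimal sketch of verifying a downloaded shard against the pointer's oid; the file name assumes a local download:

```python
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    """Stream the file so multi-GB shards don't need to fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "020a24bd2797e9656763dd3e5724d1354d96e4528376d586286f1524ab42d981"
assert sha256_of("model-00001-of-00002.safetensors") == expected
```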
tokenizer_config.json CHANGED
@@ -233,7 +233,7 @@
   "extra_special_tokens": {},
   "model_max_length": 40960,
   "pad_token": "<|endoftext|>",
-  "padding_side": "right",
+  "padding_side": "left",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
   "unk_token": null