tanlonghua committed
Commit 7d26150 · 1 parent: 4ce8588

Ming-flash-omni-Preview init

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the complete change set.
Files changed (50)
  1. .gitattributes +64 -0
  2. am.mvn +8 -0
  3. byt5/byt5.json +18 -0
  4. byt5/byt5_mapper/byt5_mapper.pt +3 -0
  5. byt5/byt5_model/base.pt +3 -0
  6. byt5/byt5_model/byt5_model.pt +3 -0
  7. byt5/color_idx.json +1 -0
  8. byt5/font_uni_10-lang_idx.json +1 -0
  9. byt5/google__byt5-smal/.gitattributes +17 -0
  10. byt5/google__byt5-smal/README.md +158 -0
  11. byt5/google__byt5-smal/config.json +28 -0
  12. byt5/google__byt5-smal/flax_model.msgpack +3 -0
  13. byt5/google__byt5-smal/generation_config.json +7 -0
  14. byt5/google__byt5-smal/pytorch_model.bin +3 -0
  15. byt5/google__byt5-smal/special_tokens_map.json +1 -0
  16. byt5/google__byt5-smal/tf_model.h5 +3 -0
  17. byt5/google__byt5-smal/tokenizer_config.json +1 -0
  18. config.json +290 -0
  19. connector/config.json +28 -0
  20. connector/generation_config.json +14 -0
  21. connector/model-00001-of-00002.safetensors +3 -0
  22. connector/model-00002-of-00002.safetensors +3 -0
  23. connector/model.safetensors.index.json +345 -0
  24. mlp/config.json +11 -0
  25. mlp/model.safetensors +3 -0
  26. model-00001-of-00042.safetensors +3 -0
  27. model-00002-of-00042.safetensors +3 -0
  28. model-00003-of-00042.safetensors +3 -0
  29. model-00004-of-00042.safetensors +3 -0
  30. model-00005-of-00042.safetensors +3 -0
  31. model-00006-of-00042.safetensors +3 -0
  32. model-00007-of-00042.safetensors +3 -0
  33. model-00008-of-00042.safetensors +3 -0
  34. model-00009-of-00042.safetensors +3 -0
  35. model-00010-of-00042.safetensors +3 -0
  36. model-00011-of-00042.safetensors +3 -0
  37. model-00012-of-00042.safetensors +3 -0
  38. model-00013-of-00042.safetensors +3 -0
  39. model-00014-of-00042.safetensors +3 -0
  40. model-00015-of-00042.safetensors +3 -0
  41. model-00016-of-00042.safetensors +3 -0
  42. model-00017-of-00042.safetensors +3 -0
  43. model-00018-of-00042.safetensors +3 -0
  44. model-00019-of-00042.safetensors +3 -0
  45. model-00020-of-00042.safetensors +3 -0
  46. model-00021-of-00042.safetensors +3 -0
  47. model-00022-of-00042.safetensors +3 -0
  48. model-00023-of-00042.safetensors +3 -0
  49. model-00024-of-00042.safetensors +3 -0
  50. model-00025-of-00042.safetensors +3 -0
.gitattributes CHANGED
@@ -33,3 +33,67 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+
+
+
+ byt5/byt5_mapper/byt5_mapper.pt filter=lfs diff=lfs merge=lfs -text
+ transformer/diffusion_pytorch_model-00004-of-00004.safetensors filter=lfs diff=lfs merge=lfs -text
+ mlp/model.safetensors filter=lfs diff=lfs merge=lfs -text
+ transformer/diffusion_pytorch_model-00001-of-00004.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00002-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ byt5/google__byt5-smal/tf_model.h5 filter=lfs diff=lfs merge=lfs -text
+ model-00008-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ connector/model-00002-of-00002.safetensors filter=lfs diff=lfs merge=lfs -text
+ connector/model-00001-of-00002.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00004-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00006-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ byt5/google__byt5-smal/flax_model.msgpack filter=lfs diff=lfs merge=lfs -text
+ model-00012-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00003-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00010-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00009-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ transformer/diffusion_pytorch_model-00003-of-00004.safetensors filter=lfs diff=lfs merge=lfs -text
+ byt5/byt5_model/byt5_model.pt filter=lfs diff=lfs merge=lfs -text
+ model-00001-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00005-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ byt5/google__byt5-smal/pytorch_model.bin filter=lfs diff=lfs merge=lfs -text
+ byt5/byt5_model/base.pt filter=lfs diff=lfs merge=lfs -text
+ model-00011-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00020-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00013-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00017-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00015-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00014-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00016-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00024-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00023-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00025-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00018-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00021-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ talker/llm/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ model-00037-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00027-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00026-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00029-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ model-00028-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00038-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00030-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00039-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00031-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00042-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00040-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00033-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ talker/model.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00041-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ talker/vae/model.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00032-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ vae/diffusion_pytorch_model.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00034-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00035-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ transformer/diffusion_pytorch_model-00002-of-00004.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00007-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00019-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00022-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+ model-00036-of-00042.safetensors filter=lfs diff=lfs merge=lfs -text
+
am.mvn ADDED
@@ -0,0 +1,8 @@
+ <Nnet>
+ <Splice> 560 560
+ [ 0 ]
+ <AddShift> 560 560
+ <LearnRateCoef> 0 [ -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 
-13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 -8.311879 -8.600912 -9.615928 -10.43595 -11.21292 -11.88333 -12.36243 -12.63706 -12.8818 -12.83066 -12.89103 -12.95666 -13.19763 -13.40598 -13.49113 -13.5546 -13.55639 -13.51915 -13.68284 -13.53289 -13.42107 -13.65519 -13.50713 -13.75251 -13.76715 -13.87408 -13.73109 -13.70412 -13.56073 -13.53488 -13.54895 -13.56228 -13.59408 -13.62047 -13.64198 -13.66109 -13.62669 -13.58297 -13.57387 -13.4739 -13.53063 -13.48348 -13.61047 -13.64716 -13.71546 -13.79184 -13.90614 -14.03098 -14.18205 -14.35881 -14.48419 -14.60172 -14.70591 -14.83362 -14.92122 -15.00622 -15.05122 -15.03119 -14.99028 -14.92302 -14.86927 -14.82691 -14.7972 -14.76909 -14.71356 -14.61277 -14.51696 -14.42252 -14.36405 -14.30451 -14.23161 -14.19851 -14.16633 -14.15649 -14.10504 -13.99518 -13.79562 -13.3996 -12.7767 -11.71208 ]
+ <Rescale> 560 560
+ <LearnRateCoef> 0 [ 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 
0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 0.155775 0.154484 0.1527379 0.1518718 0.1506028 0.1489256 0.147067 0.1447061 0.1436307 0.1443568 0.1451849 0.1455157 0.1452821 0.1445717 0.1439195 0.1435867 0.1436018 0.1438781 0.1442086 0.1448844 0.1454756 0.145663 0.146268 0.1467386 0.1472724 0.147664 0.1480913 0.1483739 0.1488841 0.1493636 0.1497088 0.1500379 0.1502916 0.1505389 0.1506787 0.1507102 0.1505992 0.1505445 0.1505938 0.1508133 0.1509569 0.1512396 0.1514625 0.1516195 0.1516156 0.1515561 0.1514966 0.1513976 0.1512612 0.151076 0.1510596 0.1510431 0.151077 0.1511168 0.1511917 0.151023 0.1508045 0.1505885 0.1503493 0.1502373 0.1501726 0.1500762 0.1500065 0.1499782 0.150057 0.1502658 0.150469 0.1505335 0.1505505 0.1505328 0.1504275 0.1502438 0.1499674 0.1497118 0.1494661 0.1493102 0.1493681 0.1495501 0.1499738 0.1509654 ]
+ </Nnet>
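
For readers unfamiliar with the format: `am.mvn` is a Kaldi-style global CMVN (cepstral mean/variance normalization) "nnet" over 560-dimensional acoustic features. `<AddShift>` stores per-dimension negative means and `<Rescale>` stores per-dimension inverse standard deviations; the 80 distinct values are apparently tiled across 7 stacked frames (80 × 7 = 560), and `<Splice> ... [ 0 ]` is an identity splice. A minimal sketch of how such a file is applied, assuming the two bracketed vectors above have already been parsed into arrays (the parsing itself is not shown in this commit):

```python
import numpy as np

def apply_cmvn(features: np.ndarray, shift: np.ndarray, scale: np.ndarray) -> np.ndarray:
    """Kaldi-style global CMVN: element-wise (x + shift) * scale.

    features: (num_frames, 560) spliced acoustic features
    shift:    (560,) values from the <AddShift> block (negative per-dim means)
    scale:    (560,) values from the <Rescale> block (per-dim inverse std-devs)
    """
    return (features + shift) * scale

# Hypothetical usage once the two vectors are parsed out of am.mvn:
# normalized = apply_cmvn(frames, addshift_vec, rescale_vec)
```
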
byt5/byt5.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "byt5_mapper_type": "T5EncoderBlockByT5Mapper",
+   "byt5_mapper_config": {
+     "num_layers": 4,
+     "sdxl_channels": 2048
+   },
+   "byt5_config": {
+     "byt5_ckpt_path": "./google__byt5-smal/",
+     "byt5_name": "google/byt5-small",
+     "special_token": true,
+     "color_special_token": true,
+     "font_special_token": true,
+     "font_ann_path": "./font_uni_10-lang_idx.json",
+     "color_ann_path": "./color_idx.json",
+     "multilingual": true
+   },
+   "byt5_max_length": 256
+ }
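
In plain terms, `byt5.json` wires a local ByT5-small text encoder to a 4-layer mapper that projects into 2048 channels, enables color/font special tokens backed by the two annotation JSONs added below, and caps encoded text at 256 bytes. A minimal sketch of reading these knobs with plain `json` (how the repo's own code consumes them is not shown in this diff):

```python
import json
from pathlib import Path

cfg = json.loads(Path("byt5/byt5.json").read_text())

mapper_type = cfg["byt5_mapper_type"]                  # "T5EncoderBlockByT5Mapper"
num_layers = cfg["byt5_mapper_config"]["num_layers"]   # 4
channels = cfg["byt5_mapper_config"]["sdxl_channels"]  # 2048
max_length = cfg["byt5_max_length"]                    # 256
ckpt_path = cfg["byt5_config"]["byt5_ckpt_path"]       # "./google__byt5-smal/"

print(mapper_type, num_layers, channels, max_length, ckpt_path)
```
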
byt5/byt5_mapper/byt5_mapper.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f701be275008cbdfd89a797d775f9091922ad04479b30b3588df4aab633de3c9
+ size 301549007
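
This three-line stanza (and the analogous ones for the other binary files in this commit) is a standard Git LFS pointer: the actual ~300 MB weight file lives in LFS storage, addressed by its SHA-256, and is fetched with `git lfs pull`. A minimal sketch of parsing such a pointer (`parse_lfs_pointer` is an illustrative helper, not part of this repository):

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:f701be275008cbdfd89a797d775f9091922ad04479b30b3588df4aab633de3c9\n"
    "size 301549007\n"
)
assert pointer["oid"].startswith("sha256:")
assert pointer["size"] == "301549007"
```
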
byt5/byt5_model/base.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f0816c04e128bb5e71cf04167217e0121f145798bcfb8c76920f234989d6d2c
+ size 2991908042
byt5/byt5_model/byt5_model.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca8c97c89136f767d4534449bbf3f25296d390574e0af1cc16f09774a901d6db
+ size 877308845
byt5/color_idx.json ADDED
@@ -0,0 +1 @@
+ {"white": 0, "black": 1, "darkslategray": 2, "dimgray": 3, "darkolivegreen": 4, "midnightblue": 5, "saddlebrown": 6, "sienna": 7, "whitesmoke": 8, "darkslateblue": 9, "indianred": 10, "linen": 11, "maroon": 12, "khaki": 13, "sandybrown": 14, "gray": 15, "gainsboro": 16, "teal": 17, "peru": 18, "gold": 19, "snow": 20, "firebrick": 21, "crimson": 22, "chocolate": 23, "tomato": 24, "brown": 25, "goldenrod": 26, "antiquewhite": 27, "rosybrown": 28, "steelblue": 29, "floralwhite": 30, "seashell": 31, "darkgreen": 32, "oldlace": 33, "darkkhaki": 34, "burlywood": 35, "red": 36, "darkgray": 37, "orange": 38, "royalblue": 39, "seagreen": 40, "lightgray": 41, "tan": 42, "coral": 43, "beige": 44, "palevioletred": 45, "wheat": 46, "lavender": 47, "darkcyan": 48, "slateblue": 49, "slategray": 50, "orangered": 51, "silver": 52, "olivedrab": 53, "forestgreen": 54, "darkgoldenrod": 55, "ivory": 56, "darkorange": 57, "yellow": 58, "hotpink": 59, "ghostwhite": 60, "lightcoral": 61, "indigo": 62, "bisque": 63, "darkred": 64, "darksalmon": 65, "lightslategray": 66, "dodgerblue": 67, "lightpink": 68, "mistyrose": 69, "mediumvioletred": 70, "cadetblue": 71, "deeppink": 72, "salmon": 73, "palegoldenrod": 74, "blanchedalmond": 75, "lightseagreen": 76, "cornflowerblue": 77, "yellowgreen": 78, "greenyellow": 79, "navajowhite": 80, "papayawhip": 81, "mediumslateblue": 82, "purple": 83, "blueviolet": 84, "pink": 85, "cornsilk": 86, "lightsalmon": 87, "mediumpurple": 88, "moccasin": 89, "turquoise": 90, "mediumseagreen": 91, "lavenderblush": 92, "mediumblue": 93, "darkseagreen": 94, "mediumturquoise": 95, "paleturquoise": 96, "skyblue": 97, "lemonchiffon": 98, "olive": 99, "peachpuff": 100, "lightyellow": 101, "lightsteelblue": 102, "mediumorchid": 103, "plum": 104, "darkturquoise": 105, "aliceblue": 106, "mediumaquamarine": 107, "orchid": 108, "powderblue": 109, "blue": 110, "darkorchid": 111, "violet": 112, "lightskyblue": 113, "lightcyan": 114, "lightgoldenrodyellow": 115, "navy": 116, "thistle": 117, "honeydew": 118, "mintcream": 119, "lightblue": 120, "darkblue": 121, "darkmagenta": 122, "deepskyblue": 123, "magenta": 124, "limegreen": 125, "darkviolet": 126, "cyan": 127, "palegreen": 128, "aquamarine": 129, "lawngreen": 130, "lightgreen": 131, "azure": 132, "chartreuse": 133, "green": 134, "mediumspringgreen": 135, "lime": 136, "springgreen": 137}
byt5/font_uni_10-lang_idx.json ADDED
@@ -0,0 +1 @@
+ {"en-YACgEQNAr7w,1": 0, "en-YAFdJjbTu24,1": 1, "en-YACgETiWKS8,0": 2, "en-YAD7Qybjw1I,1": 3, "en-YACgEQNAr7w,0": 4, "en-YAFdJrJllfA,0": 5, "en-YAFdJi-0PQg,0": 6, "en-YACgEcnJpjs,0": 7, "en-YAD1aU3sLnI,0": 8, "en-YAFdJs2qTWQ,0": 9, "en-YAD7Qybjw1I,0": 10, "en-YAEgD4_ouE0,0": 11, "en-YACgEcYqQ-A,0": 12, "en-YACgEf4fYmc,0": 13, "en-YAFdJpYtCxE,0": 14, "en-YACgEev4gKc,0": 15, "en-YAEszk0JTZ8,0": 16, "en-YACgEbc43jc,0": 17, "en-YAFdJvSyp_k,2": 18, "en-YAFdJjbTu24,9": 19, "en-YAFdJjbTu24,14": 20, "en-YAFdJsjwyrY,0": 21, "en-YAFdJhem5V8,0": 22, "en-YAD_MiDU3xU,1": 23, "en-YADXm3pZ1HU,0": 24, "en-YAEK9t6iSGk,0": 25, "en-YAFLd8sKbwc,1": 26, "en-YAD1bxogMz4,1": 27, "en-YAFdJu4q1FA,0": 28, "en-YADK3w_fTc4,0": 29, "en-YAFdJjvw9Ps,0": 30, "en-YAFdJsjyMsM,0": 31, "en-YAFdJvl8raw,0": 32, "en-YAEEKFXsRzg,0": 33, "en-YAFdJhmxbVQ,0": 34, "en-YAFdJgul-2U,0": 35, "en-YACgEb2ZvHQ,0": 36, "en-YACgEXvxf8Q,0": 37, "en-YACgEZulLoA,0": 38, "en-YACkoM60Ufo,0": 39, "en-YALBs6IPQQs,0": 40, "en-YADK34qJank,0": 41, "en-YACgEUFdPdA,0": 42, "en-YAEY8qS0Of8,0": 43, "en-YACgESME5ew,0": 44, "en-YAEK94THsCY,0": 45, "en-YACgEZml080,0": 46, "en-YADrvjt3J6s,0": 47, "en-YAFdJsnKuGg,0": 48, "en-YAFdJpCEKCQ,0": 49, "en-YAFdJoNRMmU,0": 50, "en-YAFdJrIBtvg,0": 51, "en-YAFdtQi73Xs,10": 52, "en-YACgEVY5j_w,0": 53, "en-YACgES4gM44,0": 54, "en-YACkoGagfvs,0": 55, "en-YACkoGIT9qo,0": 56, "en-YALBszUSD-E,0": 57, "en-YACgEQY10lw,0": 58, "en-YADK36ab19U,0": 59, "en-YAFdtZAC0Mk,0": 60, "en-YAEblIoU62c,0": 61, "en-YAFdJsxY6B0,0": 62, "en-YAFdJnQq22A,0": 63, "en-YAFdJkJYP7I,0": 64, "en-YAD1aYG82rc,0": 65, "en-YAFdJjZxcpY,0": 66, "en-YACgEUJSCy8,0": 67, "en-YAFdJvSWHmc,0": 68, "en-YAFdtQJYB9w,0": 69, "en-YACkoNqeXKQ,0": 70, "en-YADRXsx8tSQ,0": 71, "en-YAFdJl3zAt0,0": 72, "en-YAEblJCYpYI,0": 73, "en-YADK3-gZSDU,0": 74, "en-YACgESFItrk,0": 75, "en-YACgEQzTv38,0": 76, "en-YADpK2K4GhA,0": 77, "en-YAFdJrnVElk,0": 78, "en-YAFdJh5vTu8,0": 79, "en-YAFdJgp7PFE,0": 80, "en-YAFdJu55qXI,0": 81, "en-YAFdJljEuFc,0": 82, "en-YAFdJs07v-8,0": 83, "en-YAEsdnpVGjo,0": 84, "en-YACgEan-tYE,0": 85, "en-YAEDY77AUF4,0": 86, "en-YAFdJi-0PQg,5": 87, "en-YALBs4GWnJw,0": 88, "en-YAFdte3F-n0,0": 89, "en-YACgEZ1OEPI,0": 90, "en-YAFdJt8dAY0,6": 91, "en-YAEblLMoH64,0": 92, "en-YAEDe2m9QEM,0": 93, "en-YAEOAIzk6jA,0": 94, "en-YACgEeV8bIQ,0": 95, "en-YAFdJii6DvE,0": 96, "en-YACgEZfALnA,0": 97, "en-YACgEfb36U4,0": 98, "en-YAEfHGJFE5c,0": 99, "en-YACgEblklxo,0": 100, "en-YAD86piO-AI,0": 101, "en-YADK4JHqBpg,0": 102, "en-YAFdJiLU_Ko,0": 103, "en-YAFdJigK8DM,0": 104, "en-YAFdJsEGkW0,0": 105, "en-YACgEV9owk4,0": 106, "en-YAD_Q_TxnIk,0": 107, "en-YAD1bzs0p0Y,0": 108, "en-YAFdJo1JGkU,0": 109, "en-YAEEKEdj87o,0": 110, "en-YADK4IiTTgI,0": 111, "en-YACgEfOAqSo,0": 112, "en-YACkoEA0Kc0,0": 113, "en-YAFdJoYMYSc,0": 114, "en-YADXXHvpRIU,0": 115, "en-YAE359pArE0,0": 116, "en-YACgEX27qLg,0": 117, "en-YAEl409VGBQ,0": 118, "en-YAFdJieXVAw,0": 119, "en-YAEgDxWkPNY,0": 120, "en-YACgEf9PiBg,0": 121, "en-YAFdJq4xlHw,0": 122, "en-YAFdJuFCnaw,3": 123, "en-YAFdJpU7Kkk,0": 124, "en-YACgEZbkUVE,0": 125, "en-YACgEa7uPMI,0": 126, "en-YAFdJib8xW8,0": 127, "en-YAEEKJVWzzw,0": 128, "en-YACkoP2nN4w,0": 129, "en-YAD0vgIBnsw,0": 130, "en-YAEEKKQrAAg,0": 131, "en-YAD86m_J1ck,0": 132, "en-YAEFznXsRNE,0": 133, "en-YAFdJmA6sOY,0": 134, "en-YAEwfQVqdIo,0": 135, "en-YAFdJroF2v0,0": 136, "en-YAEKEie4k1E,0": 137, "en-YAFdJl36Ts4,0": 138, "en-YAFdJi-0PQg,2": 139, "en-YALBs4z9dTA,0": 140, "en-YAFdJmibngQ,0": 141, "en-YAFdJllHsUM,0": 142, "en-YAFdJrHPPtU,0": 143, "en-YACgERDU--Q,0": 144, "en-YAFdtfahXt8,0": 145, 
"en-YADK31-VBBc,1": 146, "en-YAD9UdTHECk,0": 147, "en-YAEwfGkcZTE,0": 148, "en-YADK4O60vp0,0": 149, "en-YAEsdklG4-k,0": 150, "en-YAFdJh4apno,0": 151, "en-YAFdJrN-O4g,0": 152, "en-YACgEUY9wwg,0": 153, "en-YAFdJnyMeQE,0": 154, "en-YADW1-vpsoc,0": 155, "en-YADa1fJyh1I,0": 156, "en-YAD1aFjegN8,0": 157, "en-YAD1aLikJo8,0": 158, "en-YAD87qSfQDM,0": 159, "en-YAEK9o7SBVI,0": 160, "en-YAEZxhW1Plk,0": 161, "en-YAFdJporQ7M,0": 162, "en-YAFdJgWLPss,0": 163, "en-YAFdJhmxbVQ,6": 164, "en-YAEN_-GqMRQ,0": 165, "en-YAEblOT1UVE,0": 166, "en-YAFdJpOISlU,0": 167, "en-YALBs4t1m58,0": 168, "en-YAEnTI0o408,0": 169, "en-YAE2gr0Mbng,0": 170, "en-YACgEeT8btw,0": 171, "en-YAEnXArs1iQ,0": 172, "en-YACgEdYQidM,0": 173, "en-YAFdJjUiQoM,0": 174, "en-YADK4L2nGbg,0": 175, "en-YACgEbBJLy8,0": 176, "en-YACgEYmkkbg,0": 177, "en-YADZ-SGdpBs,1": 178, "en-YACgEQzTJgs,0": 179, "en-YAEnS2G4JLg,0": 180, "en-YACgEYG4kG4,0": 181, "en-YAFdJlXyCuc,0": 182, "en-YAEDY0HKiGw,0": 183, "en-YAFdJgT0SHI,0": 184, "en-YAD87vDtOPY,0": 185, "en-YAD87jbyiH0,0": 186, "en-YAEnXEEs5-Q,0": 187, "en-YACgEcL5b3s,0": 188, "en-YACgEZ-ZslE,0": 189, "en-YAFdJvgJ5TU,0": 190, "en-YAEEKCPvBkA,0": 191, "en-YAFdJuxxqp0,0": 192, "en-YACkoGNq-XY,0": 193, "en-YAFdJtfMD6Y,0": 194, "en-YACgEWNFnT4,0": 195, "en-YAFdJswIKAg,0": 196, "en-YAEtfsBG_F4,0": 197, "en-YAEgKFEI03k,0": 198, "en-YAFdJoBYX9o,0": 199, "en-YAEwfdrEBqg,0": 200, "en-YAEl5ALPGW8,0": 201, "en-YACgEcUZ6Q0,0": 202, "en-YACgEcnnqB4,0": 203, "en-YAEEKHuGbjo,0": 204, "en-YACgEfdj394,0": 205, "en-YAEqenDlk44,0": 206, "en-YACkoN-xg4g,0": 207, "en-YAEKEnz9g64,0": 208, "en-YACgEYbsV-E,0": 209, "en-YACgEaGjxus,0": 210, "en-YADK4PDlAAM,0": 211, "en-YADK32AK2Uk,1": 212, "en-YACgERSYqTw,0": 213, "en-YAEkpH0_xj8,0": 214, "en-YAFdJiev_Yw,0": 215, "en-YAFdJnTJPB4,0": 216, "en-YAEzvsyzAMI,0": 217, "en-YACgEcU7Qg4,0": 218, "en-YAFdJriKtFM,0": 219, "en-YAFdJlTZ1NI,0": 220, "en-YAEl49DnZBU,0": 221, "en-YACgEYSBEEA,0": 222, "en-YAEtfq3j4CA,0": 223, "en-YAFdJvAvvwQ,0": 224, "en-YACgEYS9sJU,0": 225, "en-YAEblGlbZeY,0": 226, "en-YAFdJld5jms,0": 227, "en-YAFcfr0ZwUA,0": 228, "en-YACgET-7cBI,0": 229, "en-YAEp6cVB820,1": 230, "en-YAFdJuBuAxg,0": 231, "en-YAErUQDw3VY,0": 232, "en-YADK4LSvvyE,0": 233, "en-YAD0tAtrwAQ,0": 234, "en-YAFcfuZZeUg,0": 235, "en-YAEblFjUZbo,0": 236, "en-YAEe9XbWpNs,0": 237, "en-YACgEdRnAX0,0": 238, "en-YAFdJuaTEaM,0": 239, "en-YAEl5CGw_K4,0": 240, "en-YAFdJqpPvDU,0": 241, "en-YAFdJsxBlTg,0": 242, "en-YAFPtjs8JGg,0": 243, "en-YADK4NKG4TI,0": 244, "en-YACgEfRt9BY,0": 245, "en-YAErQqMI9Kg,0": 246, "en-YAEBJpNF3qU,0": 247, "en-YAFdJpAlwLA,0": 248, "en-YAEzv1-o0Yo,0": 249, "en-YADcrgBnGlo,0": 250, "en-YAFdJlhd_TU,0": 251, "en-YACgERFuAbY,0": 252, "en-YACgEf17hXM,0": 253, "en-YAFdJqy4qo0,0": 254, "en-YACkoNM8PHk,0": 255, "en-YAD9UUZHL7I,0": 256, "en-YAEDY-JdnLA,0": 257, "en-YADK34sD2QM,0": 258, "en-YACkoBhm29U,0": 259, "en-YACgEaBTFMQ,0": 260, "en-YAEp0r5sqEk,0": 261, "en-YAFdJgCyb_c,0": 262, "en-YAEnXdJOdyU,0": 263, "en-YAFdJsG7tjE,0": 264, "en-YALBs2IK6rw,0": 265, "en-YAFdJuuBNyM,0": 266, "en-YAEl449ycJ8,0": 267, "en-YAFdJjtB23o,0": 268, "en-YAEwfVo3f4M,0": 269, "en-YAEz2L9phwY,0": 270, "en-YAFdJqX996o,0": 271, "en-YACgEZHPKsk,0": 272, "en-YAExnPxTSiY,0": 273, "en-YAD1aHa0r9w,0": 274, "en-YAFdJksXcAk,0": 275, "en-YAFdJgh5nak,0": 276, "en-YALBs2pFj2o,0": 277, "en-YAEzwIsE44Y,0": 278, "en-YACgEUaXOJg,0": 279, "en-YAEt1cjGqjs,0": 280, "en-YAD1fewp_mA,0": 281, "en-YAFBcXIeUro,0": 282, "en-YAEKEi3zNJo,0": 283, "en-YAEblKUQHes,0": 284, "en-YAFdJkmvhGI,0": 285, "en-YACgEbcqMPM,0": 286, "en-YAEl5HuzpNE,0": 287, 
"en-YACgEVEOb9k,0": 288, "en-YACgEYVlm9Q,0": 289, "en-YAFcfgcNhWk,2": 290, "en-YAECFubU7k0,0": 291, "en-YAFdJs8L2aw,0": 292, "en-YAD86jMspBA,0": 293, "en-YAFdJtdLX_k,0": 294, "en-YAEtLxoPSMY,0": 295, "en-YACkoOF0Xck,0": 296, "en-YADK4PnlFRs,0": 297, "en-YAEY82Qa8Gk,0": 298, "en-YAFdJvPDcxY,0": 299, "en-YADrnbQE49o,0": 300, "en-YAFdJlJTV2I,0": 301, "en-YALBs8ZVRhw,0": 302, "en-YAEw13FITIg,0": 303, "en-YACgEc9BFCs,0": 304, "en-YAEl5FGU3WA,0": 305, "en-YAD1aIFxe3I,0": 306, "en-YACgEa-oJH8,0": 307, "en-YADZ-Uvo1tU,0": 308, "en-YAEbvohm0UE,0": 309, "en-YAFdJrsoN0U,0": 310, "en-YACgEUrhTw0,0": 311, "en-YAD87juPgMk,0": 312, "en-YADpK5MnTWc,0": 313, "en-YADWjvg7BXI,3": 314, "en-YAD_Q5-M58k,0": 315, "en-YAEwfYBiD_M,0": 316, "en-YAEl5FtA9yE,0": 317, "en-YACgES4qGnk,0": 318, "en-YAFdJnT_B0Y,0": 319, "en-YAFdJhem5V8,2": 320, "en-YACgEbSBm0k,0": 321, "en-YALBs-plo-0,0": 322, "en-YAFdJnJADb0,0": 323, "en-YACgEbw4BXM,0": 324, "en-YAFdJsZqKzQ,0": 325, "en-YAEQhEbd9yc,0": 326, "en-YAD1acp_tgc,0": 327, "en-YAEN_4MsRF0,0": 328, "en-YAFdJg7KuRM,0": 329, "en-YACgEWWPgRQ,0": 330, "en-YAEl5AUzZYw,0": 331, "en-YAFdJkQTgbI,10": 332, "en-YAD1b1xytFY,0": 333, "en-YAEblEgQM4I,0": 334, "en-YAEp6dGWhEw,0": 335, "en-YACgEf1HUE0,0": 336, "en-YACgEZP8fb0,0": 337, "en-YAEK9kW1tuk,0": 338, "en-YAFdJh7OuNE,0": 339, "en-YALBs-mIj2o,0": 340, "en-YAEp6N5LTlI,0": 341, "en-YAFdJop5r5s,0": 342, "en-YACgEZJ1Flw,0": 343, "en-YACkoHwyorc,0": 344, "en-YACgEWqX9J4,0": 345, "en-YAEblI1lTSo,0": 346, "en-YAEdVbQZErY,0": 347, "en-YADK3w19fbU,0": 348, "en-YAEl5I3BtSQ,0": 349, "en-YADK30HQjSs,3": 350, "en-YADZ-Xh6OAQ,1": 351, "en-YACgESE7eJQ,0": 352, "en-YADiGTsvS-0,0": 353, "en-YAEDY9_L0nI,0": 354, "en-YACkoFYrTFU,3": 355, "en-YAFdJjbTu24,0": 356, "en-YADpK85a6bg,0": 357, "en-YACgEaymIoM,0": 358, "en-YAFdJk33d3s,0": 359, "en-YAFdJsttDqg,0": 360, "en-YALBs4GnhSI,0": 361, "en-YAD1aZCCYU8,0": 362, "en-YACgEap1YR0,0": 363, "en-YAFcfjveRFU,0": 364, "en-YAEblNfoaG0,0": 365, "en-YAFdJkrxrTY,0": 366, "en-YAFdJkBmHI8,0": 367, "en-YAD5jdnkFxc,0": 368, "en-YAEblM3qm7o,0": 369, "en-YAFdJslrMiA,0": 370, "en-YAFdJiyJLIs,0": 371, "en-YAD1aIha89c,0": 372, "en-YAEl44ODsfk,0": 373, "en-YACgEbGmrJs,0": 374, "en-YACkoCjjZNk,0": 375, "en-YAEl5OhdtuU,0": 376, "en-YAEDY0oiFqg,0": 377, "en-YACgEQQ14jI,0": 378, "en-YAFdJscRBb4,0": 379, "en-YAD8SnSjSFI,0": 380, "en-YAEwfB6Cpd8,0": 381, "en-YAEEKNsy3Ac,0": 382, "en-YAD-bcxF3yc,0": 383, "en-YAEqekHBffY,0": 384, "en-YAEp6DuM3Gw,0": 385, "en-YALBs1aLP9I,0": 386, "en-YAEDY6vXgZk,0": 387, "en-YAFdJt16xgA,0": 388, "en-YACgEVOejZc,0": 389, "en-YAEzv4VxM0o,0": 390, "en-YAEe2QG9Lj0,0": 391, "en-YAEahpCHYgg,0": 392, "en-YAEdVfBznB8,0": 393, "en-YAFdJqiomRQ,0": 394, "en-YAEtLr6LTP8,0": 395, "en-YAD1hHx0kVU,0": 396, "en-YAEOAO_oFEQ,0": 397, "en-YAD1bzJCL-s,0": 398, "en-YAEw12mzUpM,0": 399, "en-YACgEf5XTk0,0": 400, "en-YAFdJjhPExE,0": 401, "en-YACgEV-yq04,0": 402, "en-YAD-3G9v1ec,0": 403, "en-YAD1aDS1Jq8,0": 404, "en-YAEt1AyAvLE,0": 405, "en-YAFdJhmdIMM,4": 406, "en-YADK4GqZ1CM,0": 407, "en-YAD5qTdjGKo,0": 408, "en-YAEW5XJIpFQ,0": 409, "en-YADZ-ayW8Wk,0": 410, "en-YAEw2GhSLyw,0": 411, "en-YADZ-Xou1-M,0": 412, "en-YACgEXD5I60,2": 413, "en-YALBsxS5LCo,0": 414, "en-YADK4Fi0R2c,0": 415, "en-YACkoE390M8,0": 416, "en-YAEp7P1MGw0,0": 417, "en-YAEW5dh43hM,0": 418, "en-YACgEdeqDWI,0": 419, "en-YACgEcB-r-s,0": 420, "en-YACgEf5ivwU,0": 421, "en-YAFdJtqKbY0,0": 422, "en-YACgEVR7RtU,0": 423, "en-YALBs-lGe50,0": 424, "en-YAEblMBc9kg,0": 425, "en-YAFShan5_TM,0": 426, "en-YADK3_DWcOc,0": 427, "en-YAFdJu1N8sU,0": 428, "en-YAD1b5m4a2U,0": 429, 
"en-YALBs2ploWQ,0": 430, "en-YAE3C_w4eo8,0": 431, "en-YACkoD1yZN0,0": 432, "en-YAFdJo1JGkU,2": 433, "en-YADZ-fX8WFg,0": 434, "en-YAFdJqDa7Hs,0": 435, "en-YADilkdu4yg,0": 436, "en-YAFdJoWmhxU,0": 437, "en-YAECwRPX1Xc,0": 438, "en-YAFdJtYyURU,0": 439, "en-YAEp7LGZQcs,0": 440, "en-YALBs_i4tWU,0": 441, "en-YACkoOFEi8A,0": 442, "en-YACkoOrpjtg,0": 443, "en-YAEl5Lrxk80,0": 444, "en-YAEl4_uvFuc,0": 445, "en-YAFdtTOE9xo,0": 446, "en-YADZ-TlaGts,0": 447, "en-YACkoIWuJIg,0": 448, "en-YAFdJtkxLaU,0": 449, "en-YADK4OIbCxo,0": 450, "en-YAEt2u1JIto,0": 451, "en-YAEHOu6Gglo,0": 452, "en-YACgEa7h_Rs,0": 453, "en-YACgEcglnTk,0": 454, "en-YAEblJBnHeE,0": 455, "en-YAFdJpfTcWk,0": 456, "en-YAEqegJ2MAE,0": 457, "en-YACgEfiK7GI,0": 458, "en-YAEl4-ppXpY,0": 459, "en-YAEtfpKdmV8,0": 460, "en-YAFdJvK7TsQ,0": 461, "en-YAFdJp0SJos,0": 462, "en-YADK4EFto8o,0": 463, "en-YAD0tJJIJ9k,0": 464, "en-YAEK9jLiVVQ,0": 465, "en-YALBsxcmblk,0": 466, "en-YAFdJi-0PQg,7": 467, "en-YAFcfr6JQmY,0": 468, "en-YAD0tK8KtxA,0": 469, "en-YAEwfDWdL1Q,0": 470, "en-YAEDY6_5_7w,0": 471, "en-YAFdJqVlYFE,0": 472, "en-YALBs2pfuW0,0": 473, "en-YAEY67jaCX4,0": 474, "en-YALBs65Jhw0,0": 475, "en-YAFdJgXyVZM,0": 476, "en-YAD0xKJP184,0": 477, "en-YAD02XHz9x0,0": 478, "en-YADK3wExL94,0": 479, "en-YAFO9PKnZ5M,0": 480, "en-YAEKEqBSzys,0": 481, "en-YAEN_-C_FEM,0": 482, "en-YACgEQesXis,0": 483, "en-YAFMT3WXROc,0": 484, "en-YACkoG71UKk,0": 485, "en-YADZ-VRpAXg,0": 486, "en-YAFge7Z-F6s,0": 487, "en-YAEp642Etm0,0": 488, "en-YAEl5JTmEjA,0": 489, "en-YAFcfuAu6aM,0": 490, "en-YAEzvwGEtgY,0": 491, "en-YACgEeRz73o,0": 492, "en-YAD-3UbKYbo,0": 493, "en-YAEwfYJNRYY,0": 494, "en-YAEblAWfbUM,0": 495, "en-YAD1aAfg0sc,0": 496, "en-YAEtfuYOYZQ,0": 497, "en-YADYEo5bdSo,0": 498, "en-YAFdJoWaWgo,0": 499, "en-YACgEesdoec,0": 500, "en-YAE7QgTH7QI,0": 501, "en-YACgEYweTR4,0": 502, "en-YACgETcZDx0,0": 503, "en-YADZ-b-nafs,0": 504, "en-YACkoCuIQTA,0": 505, "en-YAD0L-hZ2K4,0": 506, "en-YAEp6Z28lQs,0": 507, "en-YACgEYiNP_0,0": 508, "en-YAFdJslAyFM,0": 509, "en-YADK3zBTjLs,0": 510, "en-YAFdJmNtp_I,0": 511, "cn-YAD88uhsoWI,1": 0, "cn-YAD88sUatvg,2": 1, "cn-YAD88sUatvg,3": 2, "cn-YAD83ntI60U,1": 3, "cn-YAFdPjrVnnA,1": 4, "cn-YAD85fiweU8,1": 5, "cn-YAD85pph3W4,1": 6, "cn-YAD85fiweU8,3": 7, "cn-YAD88iKwQic,3": 8, "cn-YAD88iKwQic,2": 9, "cn-YAD88rKqzd4,1": 10, "cn-YAD8yBl1DNg,0": 11, "cn-YAEKj6refro,0": 12, "cn-YAD8yL1Zyxg,0": 13, "cn-YAD88g-yrHs,0": 14, "cn-YAD88rHl2jg,0": 15, "cn-YAD884Bk2tY,0": 16, "cn-YAD889JIRyI,0": 17, "cn-YAD8yM-dI1E,0": 18, "cn-YAD885Xn75g,0": 19, "cn-YAD8z6vwDME,0": 20, "cn-YAD83OzBjxM,0": 21, "cn-YAD82-jiKa4,0": 22, "cn-YAD88oRFcIw,0": 23, "cn-YAD88jfJsos,0": 24, "cn-YAEKj5CeMyM,0": 25, "cn-YAD88gLHMug,0": 26, "cn-YAEKj8kcH0E,0": 27, "cn-YAD82qDCRaE,0": 28, "cn-YAEKGHP23Po,0": 29, "cn-YAEKj4OFKmM,0": 30, "cn-YAD5fpjT65c,0": 31, "cn-YAD5fstTFyI,0": 32, "cn-YAD5fgW4ieM,0": 33, "cn-YAEVI6PMQKg,0": 34, "cn-YADw144OwLI,0": 35, "cn-YAEVIpofZJI,0": 36, "cn-YADw144OwLI,1": 37, "cn-YAEVIiOu35c,0": 38, "cn-YAEVI6PMQKg,1": 39, "cn-YAEVIiOu35c,1": 40, "cn-YADLvIjsCKM,1": 41, "cn-YAFmnHvpZBw,13": 42, "cn-YADLvO5aqmg,1": 43, "cn-YAFmnHvpZBw,9": 44, "cn-YADLvPfcNUo,2": 45, "cn-YADLvPfcNUo,0": 46, "cn-YADLvIjsCKM,0": 47, "cn-YAEVIl8Lg5o,0": 48, "cn-YAFE4L-hc9E,0": 49, "cn-YAFm-L3XVYc,2": 50, "cn-YAFm-L3XVYc,3": 51, "cn-YAFm-L3XVYc,4": 52, "cn-YAFm-L3XVYc,1": 53, "cn-YAFm-L3XVYc,0": 54, "cn-YAF44-IhFa4,0": 55, "cn-YAFM8N7B7jQ,0": 56, "cn-YAF1bRqtqVc,0": 57, "cn-YAFM8In-SFk,0": 58, "cn-YAF1bGgV8lg,0": 59, "cn-YAFO_ktdCCs,0": 60, "cn-YAFM8PLEJZo,0": 61, "cn-YAFR6Ewev0o,0": 62, 
"cn-YAFgg0ihWtI,0": 63, "cn-YAFM8IBvBQk,0": 64, "cn-YAFUJo0ILCo,0": 65, "cn-YAFUJqkZm1g,0": 66, "cn-YAFM8HYLse0,0": 67, "cn-YAFhX3dex68,0": 68, "cn-YAFhX6Mc75k,0": 69, "cn-YADxmG_q88E,0": 70, "cn-YAEpiyxwxEA,0": 71, "cn-YAEqMksnRs0,0": 72, "cn-YAFdJ1HPT5Y,1": 73, "cn-YAFdJ1HPT5Y,2": 74, "cn-YAFdJ1HPT5Y,5": 75, "cn-YAFdJ1HPT5Y,3": 76, "cn-YAFdJ1HPT5Y,0": 77, "cn-YAFdJ1HPT5Y,4": 78, "cn-YAC1IYcrX24,1": 79, "cn-YAC1IYcrX24,0": 80, "cn-YACgEfsjlPI,0": 81, "cn-YADLvBZKmRs,0": 82, "cn-YAFbElyPcAk,0": 83, "cn-YAErMI40MqQ,0": 84, "cn-YAFxsTm7Ehk,3": 85, "cn-YAFxsTm7Ehk,5": 86, "cn-YAFxsTm7Ehk,6": 87, "cn-YAFxsTm7Ehk,0": 88, "cn-YAFxsTm7Ehk,2": 89, "cn-YAFxsTm7Ehk,1": 90, "cn-YAFxsTm7Ehk,4": 91, "cn-YAFKHgUcwgs,0": 92, "cn-YAFbF6i2xPU,0": 93, "cn-YAFiCelikkQ,0": 94, "cn-YAFm90kJfGg,1": 95, "cn-YAFm90kJfGg,2": 96, "cn-YAFm90kJfGg,0": 97, "cn-YAFm90kJfGg,3": 98, "cn-YAFm90kJfGg,4": 99, "cn-YAFm90kJfGg,5": 100, "cn-YAFm90kJfGg,6": 101, "cn-YAFbAxoHSnQ,0": 102, "cn-YAFxsp45LlU,0": 103, "cn-YAFAemfu3EE,0": 104, "cn-YAFAejnH8AI,0": 105, "cn-YAFxsUIKii0,0": 106, "cn-YAFi8xehacg,0": 107, "cn-YAFxsqDnL70,0": 108, "cn-YAFi9j8sRzY,0": 109, "cn-YAFAel0SEwA,0": 110, "cn-YAFbEinIWBI,0": 111, "cn-YAFAeeMBDMA,0": 112, "cn-YAD88y0aeSk,0": 113, "cn-YAFKSwei2d4,0": 114, "cn-YACkoLOROjc,0": 115, "cn-YADxmAim-1I,0": 116, "cn-YADxmKnFX1c,1": 117, "cn-YADxmIrXx5Q,0": 118, "cn-YADxmMvdSmk,0": 119, "cn-YAEXIATaFDI,0": 120, "cn-YAEXIA3Ho2k,0": 121, "cn-YAEXILIfBpg,0": 122, "cn-YAFKsJSonN4,0": 123, "cn-YAD5frss4W4,0": 124, "cn-YADLvIVLCEI,0": 125, "cn-YAFM6itLloU,0": 126, "cn-YADLvPfcNUo,3": 127, "cn-YAD5fo9bs8I,0": 128, "cn-YACkoBe-PPY,0": 129, "cn-YAD5ftTfKkE,0": 130, "cn-YAD5fuUhbOo,0": 131, "cn-YAFbAa_kqVU,0": 132, "cn-YAEXIN-R7j8,0": 133, "cn-YAEXICO6FDQ,0": 134, "cn-YAEXIGgPTUc,0": 135, "cn-YAEVIuRBUzM,1": 136, "cn-YAEXINyIohc,0": 137, "cn-YAD5fnsXkiA,0": 138, "cn-YAD5fi9biYM,0": 139, "cn-YAEVIgNjpoI,1": 140, "cn-YAFViiF1HXA,0": 141, "cn-YADLvGXZ0uI,0": 142, "jp-YAFKDlChDd0,0": 0, "jp-YACgEf2JSZ4,0": 1, "jp-YADK4B_TPsg,0": 2, "jp-YACgEZMnOIo,0": 3, "jp-YACgEVOTc6M,0": 4, "jp-YACgEbWQPqI,1": 5, "jp-YACgEbWQPqI,0": 6, "jp-YAE-bg-JUsU,0": 7, "jp-YAE-bg-JUsU,1": 8, "jp-YACgEeLXaQ4,0": 9, "jp-YAFd-NWSSIw,1": 10, "jp-YAFd-NWSSIw,0": 11, "jp-YAEkLotIaA0,0": 12, "jp-YAEkLgxzZpI,0": 13, "jp-YAEkLtl5pBE,0": 14, "jp-YAEkLWewMC8,1": 15, "jp-YAEkLWewMC8,0": 16, "jp-YAEkLzE429Q,0": 17, "jp-YAEkLfCdQEg,1": 18, "jp-YAEkLfCdQEg,0": 19, "jp-YAEkLaX7hi0,0": 20, "jp-YAEkLxUu7Jk,1": 21, "jp-YAEkLxUu7Jk,0": 22, "jp-YAEkLoeZqsU,0": 23, "jp-YAEkL07LA1M,0": 24, "jp-YAEkLh1D-U8,0": 25, "jp-YAEkLh2jKMc,0": 26, "jp-YAEkLmWtbTk,0": 27, "jp-YAEkL1uwg4k,1": 28, "jp-YAEkL1uwg4k,0": 29, "jp-YAEkLwgsQIs,0": 30, "jp-YAEkL8KL1ow,1": 31, "jp-YAEkL8KL1ow,0": 32, "jp-YAEkLzIdm48,0": 33, "jp-YAEkLzIdm48,1": 34, "jp-YAFdJrHetVk,0": 35, "jp-YAEkLkgCDMQ,0": 36, "jp-YAEkLixWqlk,0": 37, "jp-YAF8a3PoRHQ,3": 38, "jp-YAF8a3PoRHQ,2": 39, "jp-YAEkL9LOrz8,0": 40, "jp-YAEkL9LOrz8,1": 41, "jp-YAEkLnxmucQ,0": 42, "jp-YAFKDy7zORU,0": 43, "jp-YAFdJsWapc8,3": 44, "jp-YAFd-Bs4Rfc,5": 45, "jp-YAFd-LsJgWs,4": 46, "jp-YAFd-LsJgWs,1": 47, "jp-YACgESrSOeI,0": 48, "jp-YADK4CzhQlk,0": 49, "jp-YACgEaEc4aY,0": 50, "jp-YADK4A_Ts9Q,0": 51, "jp-YAEsRRDx9pE,0": 52, "jp-YACgEUlyY9k,0": 53, "jp-YAEsRQ-GjjU,1": 54, "jp-YAFdJo8b3Iw,2": 55, "jp-YAFdJo8b3Iw,1": 56, "jp-YAEihdp5b8I,0": 57, "jp-YAFdJoXKOYY,0": 58, "jp-YAFdJoXKOYY,2": 59, "jp-YAFd-KcDns4,0": 60, "jp-YAFd-KcDns4,2": 61, "jp-YAEihZEfDKk,0": 62, "jp-YAFdJuLoTNk,0": 63, "jp-YAFdJuLoTNk,1": 64, "jp-YAEihI7wfNE,1": 65, 
"jp-YAEihBBLwyE,0": 66, "jp-YAEihBBLwyE,1": 67, "jp-YAFdJjY6VPI,0": 68, "jp-YAFdJjY6VPI,2": 69, "jp-YAFdJh4kb5o,0": 70, "jp-YAFdJh4kb5o,1": 71, "jp-YAEihaAc9Wk,0": 72, "jp-YAEihaAc9Wk,1": 73, "jp-YAFdJlRy_Lk,2": 74, "jp-YAFdJlRy_Lk,1": 75, "jp-YAFdJu_eOCs,2": 76, "jp-YAFdJu_eOCs,1": 77, "jp-YAEihAScgt8,0": 78, "jp-YAEihAScgt8,1": 79, "jp-YAEihd5XjgM,0": 80, "jp-YAEihFrwpZU,0": 81, "jp-YADK4ObEr1k,0": 82, "jp-YACgEWosGU0,1": 83, "jp-YAFdJiHXlcI,1": 84, "jp-YAEihNDZ2rI,1": 85, "jp-YAEihNDZ2rI,0": 86, "jp-YAFdJhq-cT0,0": 87, "jp-YAFdJhq-cT0,1": 88, "jp-YAFdJq27ZTo,0": 89, "jp-YAFdJq27ZTo,1": 90, "jp-YAFdJpoRqQ4,0": 91, "jp-YADK4GNUC3Q,0": 92, "jp-YAEsRVuYCTQ,0": 93, "jp-YAEihC_n_Mw,1": 94, "jp-YAEi_ZjCqfk,1": 95, "jp-YAEi_ZjCqfk,0": 96, "jp-YADK4Go59AA,1": 97, "jp-YADK4IjEWQY,1": 98, "jp-YAFdJgMJ6Bo,6": 99, "jp-YAFdJgMJ6Bo,3": 100, "jp-YAF3t-mUWkA,0": 101, "jp-YAF3t-mUWkA,5": 102, "jp-YAEkLdJELZs,2": 103, "jp-YAFdtQvP8D4,1": 104, "jp-YAFdtQvP8D4,0": 105, "jp-YAF8bBk5aX8,0": 106, "jp-YAF8bBk5aX8,7": 107, "jp-YADK4GVewrQ,0": 108, "jp-YACgEb4XyEc,0": 109, "jp-YACgEWy51zw,0": 110, "jp-YADK4Ai63cI,0": 111, "jp-YADK30wBia8,0": 112, "jp-YAFcfklnPbQ,1": 113, "jp-YAFcfklnPbQ,0": 114, "jp-YAFcfoYYvAo,0": 115, "jp-YAFcfgnXQTM,0": 116, "jp-YAFcfgpIW-s,0": 117, "jp-YACgEVekhuc,0": 118, "jp-YACgES6W8BM,0": 119, "jp-YAFcfgxAU44,0": 120, "jp-YAFcfgxAU44,1": 121, "jp-YAFwrMcUtws,0": 122, "jp-YAFM8LWPI4U,0": 123, "jp-YAFvEmfyXf8,0": 124, "jp-YAFR6Eb_muc,0": 125, "jp-YAFwwq0qwZo,0": 126, "jp-YAFu4LmAo0c,0": 127, "jp-YAFuaceNdD0,0": 128, "jp-YADK3_RN7Ns,1": 129, "jp-YAEhL_nS-Do,0": 130, "jp-YAEpv0-oxio,0": 131, "jp-YADK4KUfNHs,0": 132, "jp-YAFdtRAvA14,1": 133, "jp-YAFdtRAvA14,0": 134, "jp-YAFdtXwToNI,1": 135, "jp-YAFdJmyk2l0,1": 136, "jp-YAFdJmyk2l0,0": 137, "jp-YAFdJkON4lA,1": 138, "jp-YAFdJkON4lA,0": 139, "jp-YAFdJgJr9tI,1": 140, "jp-YAFdJgJr9tI,0": 141, "jp-YAFdtSrONUI,1": 142, "jp-YAFdtSrONUI,0": 143, "jp-YAEqMkgi5Dw,0": 144, "jp-YAFdtXZPFL8,1": 145, "jp-YAFdtXZPFL8,0": 146, "jp-YAFdJlU0Exw,1": 147, "jp-YAFdJlU0Exw,0": 148, "jp-YAF8PdHd-5I,0": 149, "jp-YACgEfsjlPI,0": 150, "jp-YACgEZ0Op54,0": 151, "jp-YAEqjYFWeDk,0": 152, "jp-YADK4PW3grQ,0": 153, "jp-YADK4JZFAAY,0": 154, "jp-YADK4BKUB34,0": 155, "jp-YACgEYopxEU,0": 156, "jp-YADK32c8Dh8,0": 157, "jp-YADK32c8Dh8,1": 158, "jp-YADK4DCASFE,0": 159, "jp-YAFdJky4SqQ,1": 160, "jp-YAFdJky4SqQ,0": 161, "jp-YADK4PYxDlk,0": 162, "jp-YAFdJmm1njU,0": 163, "jp-YADK4L9fSvM,0": 164, "jp-YAEhL82apSM,0": 165, "jp-YAEhL1yn0Dg,0": 166, "jp-YADK4I_rqDw,0": 167, "jp-YACgEXZEcR4,0": 168, "jp-YADsXJKDiVs,0": 169, "jp-YAEhL8rkTUI,0": 170, "jp-YAFyLzE-xQw,0": 171, "jp-YADK4H5cejw,0": 172, "jp-YAFdJmCr-wA,1": 173, "jp-YADK4M4pJaM,0": 174, "jp-YADK4Ddh880,0": 175, "jp-YADy3_aHdqw,1": 176, "jp-YADy3_aHdqw,0": 177, "jp-YAEkLTH_Bj4,1": 178, "jp-YAEkLTH_Bj4,0": 179, "jp-YAF8PQkQLBM,0": 180, "jp-YACgEe07wJU,0": 181, "jp-YAFFjuOqV7c,0": 182, "jp-YAFKDuCWJmI,0": 183, "jp-YADK4DPUlgo,0": 184, "jp-YADK4LAh_Mw,0": 185, "jp-YACkoBe-PPY,0": 186, "jp-YADK4D1hRDI,0": 187, "jp-YADK4Fv1k2o,0": 188, "jp-YAEpv6jUxHA,0": 189, "jp-YAFGHcN5e_Y,0": 190, "jp-YAFFjuwaUks,0": 191, "jp-YAFdJgDCFgs,4": 192, "jp-YAFdJgDCFgs,3": 193, "jp-YAFKDpCxcZ8,0": 194, "jp-YAFdJgMrcdw,4": 195, "jp-YAFdJgMrcdw,3": 196, "jp-YADK4AKfyZQ,0": 197, "jp-YAFdJibr0iM,0": 198, "jp-YADK4LHyf04,1": 199, "jp-YADK4LHyf04,0": 200, "jp-YADK4G5jfeo,0": 201, "jp-YAFdJjBXkhA,0": 202, "jp-YAFdJjBXkhA,1": 203, "jp-YAFdJkWJqPI,0": 204, "jp-YAFdJkWJqPI,3": 205, "jp-YADK4AXcXEA,0": 206, "jp-YAFdJuA0uX4,3": 207, "jp-YADK4IM64Ao,0": 208, 
"jp-YADK4FoE7Gc,0": 209, "jp-YACgEXMizTE,0": 210, "kr-YADK3730BNs,0": 0, "kr-YADK33SLGJs,0": 1, "kr-YADK33SLGJs,1": 2, "kr-YAFdJsgeLhs,8": 3, "kr-YAFdJsgeLhs,10": 4, "kr-YAEmtOC1LCg,1": 5, "kr-YAEmtOC1LCg,0": 6, "kr-YAFdJhAkLLo,4": 7, "kr-YAFdJhAkLLo,5": 8, "kr-YAEmtMTR5HU,0": 9, "kr-YAEmtMTR5HU,1": 10, "kr-YAFdJhAkLLo,2": 11, "kr-YAFdJhAkLLo,3": 12, "kr-YAFcfmj1C70,2": 13, "kr-YAFcfmj1C70,3": 14, "kr-YAFcfmj1C70,1": 15, "kr-YAFcfmj1C70,0": 16, "kr-YAFdJoype10,2": 17, "kr-YAFdJoype10,0": 18, "kr-YAFdJoype10,1": 19, "kr-YAFdC8KRBcA,2": 20, "kr-YAFdC8KRBcA,0": 21, "kr-YAFdC8KRBcA,1": 22, "kr-YAFdJn3zBA8,2": 23, "kr-YAFdJn3zBA8,0": 24, "kr-YAFdJn3zBA8,1": 25, "kr-YAFdJtonxqA,1": 26, "kr-YAFdJtonxqA,0": 27, "kr-YAFdJtonxqA,2": 28, "kr-YAEhvZanTiM,0": 29, "kr-YAFdJtB1Ppk,4": 30, "kr-YAFdJtB1Ppk,0": 31, "kr-YADK4FXKkMI,0": 32, "kr-YAFdJtB1Ppk,1": 33, "kr-YAFdJtB1Ppk,3": 34, "kr-YAFdJtB1Ppk,5": 35, "kr-YADK4PLQxPE,1": 36, "kr-YAE2V_kGeEw,1": 37, "kr-YAFmnBd9xfA,6": 38, "kr-YAFmnBd9xfA,2": 39, "kr-YAFmnBd9xfA,5": 40, "kr-YAE2V8o1NkQ,1": 41, "kr-YAE2V_kGeEw,0": 42, "kr-YAE2V8o1NkQ,0": 43, "kr-YADLvZJ2vVw,0": 44, "kr-YADLvZJ2vVw,1": 45, "kr-YAEv7fuV3Uc,0": 46, "kr-YAEv6jKHZ5c,0": 47, "kr-YAEv6tBm-T8,0": 48, "kr-YAEv6iDoTRc,0": 49, "kr-YAEv6pMO7-4,0": 50, "kr-YAEv6ip9K40,0": 51, "kr-YAEv7-8dlgk,0": 52, "kr-YAEv7X3TbR4,0": 53, "kr-YAEv7SXgkoc,0": 54, "kr-YAEv6l2nKtA,0": 55, "kr-YAEv7h6yQCU,0": 56, "kr-YAEv78qq_VY,0": 57, "kr-YAEv78Xf1QE,0": 58, "kr-YAEv7fg4Bmc,0": 59, "kr-YAEv7g5O-6I,0": 60, "kr-YAEmtwVPXOY,0": 61, "kr-YAFcfgxAU44,0": 62, "kr-YAFcfgxAU44,1": 63, "kr-YADK3w19fbU,0": 64, "kr-YADK37e0eJs,0": 65, "kr-YAFyG3A11Bw,0": 66, "kr-YAFcfn5J3C0,0": 67, "kr-YAFcfmqu0u4,0": 68, "kr-YAFcfgwrP5E,0": 69, "kr-YADK36TagGY,0": 70, "kr-YADK335zPus,0": 71, "kr-YADK36ab19U,0": 72, "kr-YAFdJj615Ls,1": 73, "kr-YAFdJj615Ls,2": 74, "kr-YADK39n_eyc,0": 75, "kr-YAFyMaznfGo,0": 76, "kr-YAFdJgVxIIc,0": 77, "kr-YADK37Cg39g,0": 78, "kr-YADK37Cg39g,1": 79, "kr-YAFdJgVxIIc,7": 80, "kr-YADK36tItIs,0": 81, "kr-YADK3-cPvE0,0": 82, "kr-YADK36tItIs,1": 83, "kr-YADK3-cPvE0,1": 84, "kr-YAFdJgVxIIc,6": 85, "kr-YADK3-knzio,0": 86, "kr-YADK39t6bXA,0": 87, "kr-YADK3-gZSDU,0": 88, "kr-YADK4IdwQKk,0": 89, "kr-YADK4Ge2KpY,0": 90, "kr-YADK4Ak9sFo,0": 91, "kr-YADK4BzwY0M,0": 92, "kr-YADK4BzwY0M,1": 93, "kr-YAFdJnlwgG8,2": 94, "kr-YAFdJnlwgG8,3": 95, "kr-YADK4GywOww,0": 96, "kr-YAFdJtxiiGc,0": 97, "kr-YADK4GywOww,1": 98, "kr-YADK4GyWuCE,0": 99, "kr-YADK4GyWuCE,1": 100, "kr-YADK4Odtakk,0": 101, "kr-YADK4BJS8-U,0": 102, "kr-YADK4Odtakk,1": 103, "kr-YAFdJq0B1zU,3": 104, "kr-YAFdJq0B1zU,4": 105, "kr-YAFdJtJhp3I,1": 106, "kr-YAFdJtJhp3I,2": 107, "kr-YAFdJtJhp3I,3": 108, "kr-YADt1fmqBIU,0": 109, "kr-YADt1bSf0B4,0": 110, "kr-YADt1Xf3MOg,0": 111, "kr-YADt1bPom8s,0": 112, "kr-YADt1WZ-9uw,0": 113, "kr-YADK4Gm6JgQ,0": 114, "kr-YADK4Gm6JgQ,1": 115, "kr-YADK4PuF5Do,0": 116, "kr-YADK4PuF5Do,1": 117, "kr-YADK4EdIC_Q,0": 118, "kr-YADK4EdIC_Q,1": 119, "kr-YACkoBe-PPY,0": 120, "kr-YADK37OOtIA,0": 121}
byt5/google__byt5-smal/.gitattributes ADDED
@@ -0,0 +1,17 @@
+ *.bin.* filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tar.gz filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
byt5/google__byt5-smal/README.md ADDED
@@ -0,0 +1,158 @@
+ ---
+ language:
+ - multilingual
+ - af
+ - am
+ - ar
+ - az
+ - be
+ - bg
+ - bn
+ - ca
+ - ceb
+ - co
+ - cs
+ - cy
+ - da
+ - de
+ - el
+ - en
+ - eo
+ - es
+ - et
+ - eu
+ - fa
+ - fi
+ - fil
+ - fr
+ - fy
+ - ga
+ - gd
+ - gl
+ - gu
+ - ha
+ - haw
+ - hi
+ - hmn
+ - ht
+ - hu
+ - hy
+ - ig
+ - is
+ - it
+ - iw
+ - ja
+ - jv
+ - ka
+ - kk
+ - km
+ - kn
+ - ko
+ - ku
+ - ky
+ - la
+ - lb
+ - lo
+ - lt
+ - lv
+ - mg
+ - mi
+ - mk
+ - ml
+ - mn
+ - mr
+ - ms
+ - mt
+ - my
+ - ne
+ - nl
+ - no
+ - ny
+ - pa
+ - pl
+ - ps
+ - pt
+ - ro
+ - ru
+ - sd
+ - si
+ - sk
+ - sl
+ - sm
+ - sn
+ - so
+ - sq
+ - sr
+ - st
+ - su
+ - sv
+ - sw
+ - ta
+ - te
+ - tg
+ - th
+ - tr
+ - uk
+ - und
+ - ur
+ - uz
+ - vi
+ - xh
+ - yi
+ - yo
+ - zh
+ - zu
+ datasets:
+ - mc4
+
+ license: apache-2.0
+ ---
+
+ # ByT5 - Small
+
+ ByT5 is a tokenizer-free version of [Google's T5](https://ai.googleblog.com/2020/02/exploring-transfer-learning-with-t5.html) and generally follows the architecture of [MT5](https://huggingface.co/google/mt5-small).
+
+ ByT5 was pre-trained only on [mC4](https://www.tensorflow.org/datasets/catalog/c4#c4multilingual), with no supervised training, using an average span mask of 20 UTF-8 characters. Therefore, this model has to be fine-tuned before it is usable on a downstream task.
+
+ ByT5 works especially well on noisy text data, *e.g.*, `google/byt5-small` significantly outperforms [mt5-small](https://huggingface.co/google/mt5-small) on [TweetQA](https://arxiv.org/abs/1907.06292).
+
+ Paper: [ByT5: Towards a token-free future with pre-trained byte-to-byte models](https://arxiv.org/abs/2105.13626)
+
+ Authors: *Linting Xue, Aditya Barua, Noah Constant, Rami Al-Rfou, Sharan Narang, Mihir Kale, Adam Roberts, Colin Raffel*
+
+ ## Example Inference
+
+ ByT5 works on raw UTF-8 bytes and can be used without a tokenizer:
+
+ ```python
+ from transformers import T5ForConditionalGeneration
+ import torch
+
+ model = T5ForConditionalGeneration.from_pretrained('google/byt5-small')
+
+ input_ids = torch.tensor([list("Life is like a box of chocolates.".encode("utf-8"))]) + 3  # add 3 for special tokens
+ labels = torch.tensor([list("La vie est comme une boîte de chocolat.".encode("utf-8"))]) + 3  # add 3 for special tokens
+
+ loss = model(input_ids, labels=labels).loss  # forward pass
+ ```
+
+ For batched inference and training, however, it is recommended to use a tokenizer class for padding:
+
+ ```python
+ from transformers import T5ForConditionalGeneration, AutoTokenizer
+
+ model = T5ForConditionalGeneration.from_pretrained('google/byt5-small')
+ tokenizer = AutoTokenizer.from_pretrained('google/byt5-small')
+
+ model_inputs = tokenizer(["Life is like a box of chocolates.", "Today is Monday."], padding="longest", return_tensors="pt")
+ labels = tokenizer(["La vie est comme une boîte de chocolat.", "Aujourd'hui c'est lundi."], padding="longest", return_tensors="pt").input_ids
+
+ loss = model(**model_inputs, labels=labels).loss  # forward pass
+ ```
+
+ ## Abstract
+
+ Most widely-used pre-trained language models operate on sequences of tokens corresponding to word or subword units. Encoding text as a sequence of tokens requires a tokenizer, which is typically created as an independent artifact from the model. Token-free models that instead operate directly on raw text (bytes or characters) have many benefits: they can process text in any language out of the box, they are more robust to noise, and they minimize technical debt by removing complex and error-prone text preprocessing pipelines. Since byte or character sequences are longer than token sequences, past work on token-free models has often introduced new model architectures designed to amortize the cost of operating directly on raw text. In this paper, we show that a standard Transformer architecture can be used with minimal modifications to process byte sequences. We carefully characterize the trade-offs in terms of parameter count, training FLOPs, and inference speed, and show that byte-level models are competitive with their token-level counterparts. We also demonstrate that byte-level models are significantly more robust to noise and perform better on tasks that are sensitive to spelling and pronunciation. As part of our contribution, we release a new set of pre-trained byte-level Transformer models based on the T5 architecture, as well as all code and data used in our experiments.
+
+ ![model image](https://raw.githubusercontent.com/patrickvonplaten/scientific_images/master/ByT5.png)
+
byt5/google__byt5-smal/config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "_name_or_path": "/home/patrick/t5/byt5-small",
+   "architectures": [
+     "T5ForConditionalGeneration"
+   ],
+   "d_ff": 3584,
+   "d_kv": 64,
+   "d_model": 1472,
+   "decoder_start_token_id": 0,
+   "dropout_rate": 0.1,
+   "eos_token_id": 1,
+   "feed_forward_proj": "gated-gelu",
+   "gradient_checkpointing": false,
+   "initializer_factor": 1.0,
+   "is_encoder_decoder": true,
+   "layer_norm_epsilon": 1e-06,
+   "model_type": "t5",
+   "num_decoder_layers": 4,
+   "num_heads": 6,
+   "num_layers": 12,
+   "pad_token_id": 0,
+   "relative_attention_num_buckets": 32,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "ByT5Tokenizer",
+   "transformers_version": "4.7.0.dev0",
+   "use_cache": true,
+   "vocab_size": 384
+ }
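
A consistency note that ties this config to the tokenizer files in this commit: ByT5's `vocab_size` of 384 is exactly the 256 possible byte values, plus the 3 special tokens (`<pad>`, `</s>`, `<unk>`) that the README example offsets byte ids by, plus the 125 `extra_ids` sentinel tokens declared in `tokenizer_config.json`:

```python
# 256 byte values + 3 special tokens + 125 extra_id sentinels == vocab_size
assert 256 + 3 + 125 == 384
```
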
byt5/google__byt5-smal/flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3aafee96d60e98aa18b3c7f73a2c5a2360f1f2f6df79361190a4c9e05c5ab21
+ size 1198558445
byt5/google__byt5-smal/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "decoder_start_token_id": 0,
+   "eos_token_id": 1,
+   "pad_token_id": 0,
+   "transformers_version": "4.27.0.dev0"
+ }
byt5/google__byt5-smal/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c5aaf56299d6f2d4eaadad550a40765198828ead4d74f0a15f91cbe0961931a
+ size 1198627927
byt5/google__byt5-smal/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "unk_token": {"content": "<unk>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "pad_token": {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>", "<extra_id_100>", "<extra_id_101>", "<extra_id_102>", "<extra_id_103>", "<extra_id_104>", "<extra_id_105>", "<extra_id_106>", "<extra_id_107>", "<extra_id_108>", "<extra_id_109>", "<extra_id_110>", "<extra_id_111>", "<extra_id_112>", "<extra_id_113>", "<extra_id_114>", "<extra_id_115>", "<extra_id_116>", "<extra_id_117>", "<extra_id_118>", "<extra_id_119>", "<extra_id_120>", "<extra_id_121>", "<extra_id_122>", "<extra_id_123>", "<extra_id_124>"]}
byt5/google__byt5-smal/tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f97320dd5eb49cb2323a21d584cef7c1cfc9a0976efa978fcef438676b952bc2
+ size 1198900664
byt5/google__byt5-smal/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "unk_token": {"content": "<unk>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "pad_token": {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "extra_ids": 125, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>", "<extra_id_100>", "<extra_id_101>", "<extra_id_102>", "<extra_id_103>", "<extra_id_104>", "<extra_id_105>", "<extra_id_106>", "<extra_id_107>", "<extra_id_108>", "<extra_id_109>", "<extra_id_110>", "<extra_id_111>", "<extra_id_112>", "<extra_id_113>", "<extra_id_114>", "<extra_id_115>", "<extra_id_116>", "<extra_id_117>", "<extra_id_118>", "<extra_id_119>", "<extra_id_120>", "<extra_id_121>", "<extra_id_122>", "<extra_id_123>", "<extra_id_124>"]}
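Note that ByT5 tokenizers need no vocabulary file; the two JSON files above describe them completely. A sketch of loading this local copy (path taken from the repo layout, "smal" spelling and all):

```python
from transformers import ByT5Tokenizer

tok = ByT5Tokenizer.from_pretrained("byt5/google__byt5-smal")
print(tok("Hi!").input_ids)  # [75, 108, 36, 1]
# Each UTF-8 byte is shifted by 3 (ids 0-2 are reserved for <pad>, </s>,
# <unk>), and the trailing 1 is the appended </s>.
```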
config.json ADDED
@@ -0,0 +1,290 @@
+ {
+ "architectures": [
+ "BailingMM2NativeForConditionalGeneration"
+ ],
+ "audio_config": {
+ "_name_or_path": "",
+ "add_cross_attention": false,
+ "architectures": null,
+ "bad_words_ids": null,
+ "begin_suppress_tokens": null,
+ "bos_token_id": null,
+ "chunk_size_feed_forward": 0,
+ "cross_attention_hidden_size": null,
+ "decoder_start_token_id": null,
+ "diversity_penalty": 0.0,
+ "do_sample": false,
+ "ds_kernel_size": 3,
+ "ds_stride": 2,
+ "early_stopping": false,
+ "encoder_no_repeat_ngram_size": 0,
+ "eos_token_id": null,
+ "exponential_decay_length_penalty": null,
+ "finetuning_task": null,
+ "forced_bos_token_id": null,
+ "forced_eos_token_id": null,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1"
+ },
+ "is_decoder": false,
+ "is_encoder_decoder": false,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1
+ },
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "min_length": 0,
+ "model_type": "",
+ "no_repeat_ngram_size": 0,
+ "norm_query_embeds": true,
+ "num_beam_groups": 1,
+ "num_beams": 1,
+ "num_return_sequences": 1,
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_scores": false,
+ "pad_token_id": null,
+ "prefix": null,
+ "problem_type": null,
+ "pruned_heads": {},
+ "remove_invalid_values": false,
+ "repetition_penalty": 1.0,
+ "return_dict": true,
+ "return_dict_in_generate": false,
+ "sep_token_id": null,
+ "suppress_tokens": null,
+ "task_specific_params": null,
+ "temperature": 1.0,
+ "tf_legacy_loss": false,
+ "tie_encoder_decoder": false,
+ "tie_word_embeddings": true,
+ "tokenizer_class": null,
+ "top_k": 50,
+ "top_p": 1.0,
+ "torch_dtype": null,
+ "torchscript": false,
+ "typical_p": 1.0,
+ "use_bfloat16": false,
+ "whisper_encoder_config": {
+ "n_ctx": 15000,
+ "n_head": 20,
+ "n_layer": 32,
+ "n_mels": 128,
+ "n_state": 1280
+ }
+ },
+ "llm_config": {
+ "_name_or_path": "",
+ "add_cross_attention": false,
+ "architectures": [
+ "BailingMoeV2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bailing_moe_v2.BailingMoeV2Config",
+ "AutoModel": "modeling_bailing_moe_v2.BailingMoeV2Model",
+ "AutoModelForCausalLM": "modeling_bailing_moe_v2.BailingMoeV2ForCausalLM"
+ },
+ "bad_words_ids": null,
+ "begin_suppress_tokens": null,
+ "bos_token_id": null,
+ "chunk_size_feed_forward": 0,
+ "cross_attention_hidden_size": null,
+ "decoder_start_token_id": null,
+ "diversity_penalty": 0.0,
+ "do_sample": false,
+ "early_stopping": false,
+ "embedding_dropout": 0.0,
+ "encoder_no_repeat_ngram_size": 0,
+ "eos_token_id": 156895,
+ "exponential_decay_length_penalty": null,
+ "finetuning_task": null,
+ "first_k_dense_replace": 1,
+ "forced_bos_token_id": null,
+ "forced_eos_token_id": null,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1"
+ },
+ "image_patch_token": 157157,
+ "image_start_token": 157158,
+ "initializer_range": 0.006,
+ "intermediate_size": 9216,
+ "is_decoder": false,
+ "is_encoder_decoder": false,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1
+ },
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "min_length": 0,
+ "model_type": "bailing_moe_v2",
+ "moe_intermediate_size": 1024,
+ "moe_router_topk_scaling_factor": 2.5,
+ "n_group": 8,
+ "no_repeat_ngram_size": 0,
+ "norm_head": false,
+ "norm_softmax": false,
+ "norm_topk_prob": true,
+ "num_attention_heads": 32,
+ "num_beam_groups": 1,
+ "num_beams": 1,
+ "num_experts": 256,
+ "num_experts_per_tok": 8,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 4,
+ "num_return_sequences": 1,
+ "num_shared_experts": 1,
+ "output_attentions": false,
+ "output_dropout": 0.0,
+ "output_hidden_states": false,
+ "output_router_logits": false,
+ "output_scores": false,
+ "pad_token_id": 156892,
+ "partial_rotary_factor": 0.5,
+ "prefix": null,
+ "pretraining_tp": 1,
+ "problem_type": null,
+ "pruned_heads": {},
+ "remove_invalid_values": false,
+ "repetition_penalty": 1.0,
+ "return_dict": true,
+ "return_dict_in_generate": false,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": null,
+ "type": "video_rope"
+ },
+ "rope_theta": 600000,
+ "routed_scaling_factor": 2.5,
+ "router_type": "MultiRouter",
+ "sep_token_id": null,
+ "sliding_window": 4096,
+ "spatial_merge_size": 2,
+ "suppress_tokens": null,
+ "task_specific_params": null,
+ "temperature": 1.0,
+ "tf_legacy_loss": false,
+ "tie_encoder_decoder": false,
+ "tie_word_embeddings": false,
+ "tokenizer_class": null,
+ "tokens_per_second": 2,
+ "top_k": 50,
+ "top_p": 1.0,
+ "topk_group": 4,
+ "torch_dtype": "bfloat16",
+ "torchscript": false,
+ "typical_p": 1.0,
+ "use_bfloat16": false,
+ "use_bias": false,
+ "use_cache": true,
+ "use_expert_bias": true,
+ "use_qkv_bias": false,
+ "use_sliding_window": false,
+ "video_patch_token": 157175,
+ "video_start_token": 157159,
+ "vocab_size": 157184
+ },
+ "mlp_depth": 2,
+ "model_type": "bailingmm_moe_v2_lite",
+ "talker_config": null,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.53.0.dev0",
+ "vision_config": {
+ "_name_or_path": "",
+ "add_cross_attention": false,
+ "architectures": [
+ "Qwen2_5_VisionTransformer"
+ ],
+ "auto_map": {
+ "AutoConfig": "configuration_qwen2_5_vit.Qwen2_5_VLVisionConfig",
+ "AutoModel": "qwen2_5_vit.Qwen2_5_VisionTransformer"
+ },
+ "bad_words_ids": null,
+ "begin_suppress_tokens": null,
+ "bos_token_id": null,
+ "chunk_size_feed_forward": 0,
+ "cross_attention_hidden_size": null,
+ "decoder_start_token_id": null,
+ "depth": 32,
+ "diversity_penalty": 0.0,
+ "do_sample": false,
+ "early_stopping": false,
+ "encoder_no_repeat_ngram_size": 0,
+ "eos_token_id": null,
+ "exponential_decay_length_penalty": null,
+ "finetuning_task": null,
+ "forced_bos_token_id": null,
+ "forced_eos_token_id": null,
+ "fullatt_block_indexes": [
+ 7,
+ 15,
+ 23,
+ 31
+ ],
+ "hidden_act": "silu",
+ "hidden_size": 1280,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1"
+ },
+ "in_channels": 3,
+ "in_chans": 3,
+ "intermediate_size": 3456,
+ "is_decoder": false,
+ "is_encoder_decoder": false,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1
+ },
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "min_length": 0,
+ "model_type": "qwen2_5_vit",
+ "no_repeat_ngram_size": 0,
+ "num_beam_groups": 1,
+ "num_beams": 1,
+ "num_heads": 16,
+ "num_return_sequences": 1,
+ "out_hidden_size": 8192,
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_scores": false,
+ "pad_token_id": null,
+ "patch_size": 14,
+ "prefix": null,
+ "problem_type": null,
+ "pruned_heads": {},
+ "remove_invalid_values": false,
+ "repetition_penalty": 1.0,
+ "return_dict": true,
+ "return_dict_in_generate": false,
+ "sep_token_id": null,
+ "spatial_merge_size": 2,
+ "spatial_patch_size": 14,
+ "suppress_tokens": null,
+ "task_specific_params": null,
+ "temperature": 1.0,
+ "temporal_patch_size": 2,
+ "tf_legacy_loss": false,
+ "tie_encoder_decoder": false,
+ "tie_word_embeddings": true,
+ "tokenizer_class": null,
+ "tokens_per_second": 2,
+ "top_k": 50,
+ "top_p": 1.0,
+ "torch_dtype": "bfloat16",
+ "torchscript": false,
+ "typical_p": 1.0,
+ "use_bfloat16": false,
+ "window_size": 112
+ }
+ }
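The interesting part of llm_config is the MoE geometry: 256 routed experts split into n_group = 8 groups, 8 routed experts active per token plus 1 shared expert, group-limited routing with topk_group = 4, normalized top-k probabilities, and a routed_scaling_factor of 2.5. The authoritative routing lives in the bundled modeling_bailing_moe_v2.py; the sketch below only illustrates, at shape level, what group-limited top-k routing of this configuration computes (scoring details such as sigmoid vs. softmax and the expert-bias term are assumptions here, not the repository's actual implementation):

```python
import torch

num_experts, n_group, topk_group, top_k, scale = 256, 8, 4, 8, 2.5

scores = torch.rand(num_experts)                  # router affinities, one token
per_group = scores.view(n_group, -1)              # (8, 32): 32 experts per group
keep = per_group.max(dim=-1).values.topk(topk_group).indices  # 4 strongest groups
mask = torch.zeros(n_group)
mask[keep] = 1.0
masked = (per_group * mask[:, None]).flatten()    # zero out dropped groups
weights, expert_ids = masked.topk(top_k)          # 8 experts from surviving groups
weights = scale * weights / weights.sum()         # "norm_topk_prob" + 2.5x scaling
```

With first_k_dense_replace = 1, layer 0 stays a dense FFN and the remaining 31 layers use this routed mixture.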
connector/config.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 1536,
+ "initializer_range": 0.02,
+ "intermediate_size": 8960,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 21,
+ "model_type": "qwen2",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": 32768,
+ "tie_word_embeddings": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.51.3",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
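The connector is a stock Qwen2-architecture decoder: 28 layers, hidden size 1536, and grouped-query attention with 12 query heads over 2 KV heads, so 6 query heads share each KV head and head_dim works out to 1536 / 12 = 128. Because model_type is plain "qwen2", it can be inspected with vanilla transformers:

```python
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("connector")  # this repo's subdirectory
assert cfg.hidden_size // cfg.num_attention_heads == 128        # head_dim
assert cfg.num_attention_heads // cfg.num_key_value_heads == 6  # GQA ratio
```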
connector/generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.1,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.51.3"
+ }
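These defaults are picked up automatically when the connector is loaded from this folder and .generate() is called; spelled out as explicit kwargs they would be equivalent to:

```python
# Equivalent explicit kwargs; `model` and `inputs` are assumed to come
# from the connector checkpoint above.
sampling_defaults = dict(
    do_sample=True,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
    repetition_penalty=1.1,
    eos_token_id=[151645, 151643],  # decoding stops at either id
    pad_token_id=151643,
)
# out = model.generate(**inputs, **sampling_defaults)
```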
connector/model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ba0ca2a6063ff64b4a3b65a590f5e75c9069425a6b7c6bc7660624c9f654238
+ size 4996670464
connector/model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6388178c9109d3141d95c1a77ee1403e1ee356e97bcaab07000370fe21c98b0b
+ size 1178224960
connector/model.safetensors.index.json ADDED
@@ -0,0 +1,345 @@
+ {
+ "metadata": {
+ "total_size": 6174857216
+ },
+ "weight_map": {
+ "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+ "model.norm.weight": "model-00002-of-00002.safetensors"
+ }
+ }
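The index maps each tensor name to the shard holding it; metadata.total_size (6,174,857,216 bytes, about 6.2 GB, consistent with a ~1.5B-parameter model in float32) is the sum of all tensor sizes, and layer 21 straddles the shard boundary (its gate/up projections sit in shard 1, its down projection and norms in shard 2). transformers resolves this automatically, but a manual loader driven by the index takes only a few lines:

```python
import json
from safetensors.torch import load_file

with open("connector/model.safetensors.index.json") as f:
    index = json.load(f)

state_dict = {}
for shard in sorted(set(index["weight_map"].values())):  # the two shard files
    state_dict.update(load_file(f"connector/{shard}"))

assert set(state_dict) == set(index["weight_map"])  # every tensor accounted for
```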
mlp/config.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "hidden_dim": 2240,
+ "mid_hidden_dim": 4480,
+ "output_dim": 32,
+ "activation": "gelu",
+ "layer_norm_eps": 1e-05,
+ "initializer_range": 0.02,
+ "architectures": [
+ "CustomMidLayerMLPModel"
+ ]
+ }
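"CustomMidLayerMLPModel" is not a standard transformers architecture, so these fields only pin down shapes: a 2240-dim input pushed through a 4480-dim GELU hidden layer to a 32-dim output. The real class ships with the model code; a shape-consistent sketch (the LayerNorm placement here is an assumption) would be:

```python
import torch.nn as nn

class CustomMidLayerMLPSketch(nn.Module):
    """Assumed structure matching mlp/config.json; not the shipped class."""

    def __init__(self, hidden_dim=2240, mid_hidden_dim=4480, output_dim=32,
                 layer_norm_eps=1e-05):
        super().__init__()
        self.norm = nn.LayerNorm(hidden_dim, eps=layer_norm_eps)
        self.fc1 = nn.Linear(hidden_dim, mid_hidden_dim)
        self.act = nn.GELU()  # "activation": "gelu"
        self.fc2 = nn.Linear(mid_hidden_dim, output_dim)

    def forward(self, x):
        return self.fc2(self.act(self.fc1(self.norm(x))))
```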
mlp/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3687baf986d03fa505b7ceff47b643c4eb599d5c85b34347a20ffba389442962
+ size 362151776
model-00001-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8214153a951c11cb783ff573c59ef5057042d37b66daa1ff4e5d72facab98006
+ size 4995280701
model-00002-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa1dc1aaada0cf0099c0012a5d8824c0bdc0498b36b54424ab2a1020e2911db1
+ size 4999688184
model-00003-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43ef00ce0b44ec7efc64df53d162ef551b970aebac77c12f2c3f6d8bc345608a
+ size 4997609472
model-00004-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59c26c24c17c300167f3520dd5fcda651c8882bf49cc96e2ff41d949be2bf5eb
+ size 4997609472
model-00005-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45d9b70a5905933516c83be3810a8ff14915729144666f70db08b1e354cc7603
+ size 4997609656
model-00006-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:844035b6596b6ab1ef8264215c021d79b250189022f942b2adba2eca2041ac17
+ size 4999688232
model-00007-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83cc7644ced11a858cbc01e27fd7bda355ead651b974b3940f59bc0f1e872562
+ size 4997609472
model-00008-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d68944f9c5ce6243f5cb048cc8f6beff9e0ae11931e263b5dcfb7d30b14615d0
+ size 4997609472
model-00009-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84c59b707277057dbf99dac03f874633205d2302221bd29f49f03364b7466398
+ size 4997609616
model-00010-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c4a83d2f9bcfd9fb48cf42589baf50a2f0e22289d1821b5ae318a9d9489614e
+ size 4999688272
model-00011-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0971ece8d45fc61d210242c16261a9c0deb76663666a08feec70d3c381211295
+ size 4997609472
model-00012-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:326554c1de9cb7d030889552d5624686aa73ad5278279226b3ca09a6b52062ad
+ size 4997609472
model-00013-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7465910047eecf1c439159c78ac8c1b54390953c018a45ef0d65217e93761b0c
+ size 4997609824
model-00014-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:48431313c55cd6d17b481ae3a4771d4001835a38b9116277ae5e78055831d173
+ size 4997610272
model-00015-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2dd0a1b768a6d29acc80052c0dfaa5d24358b465e5d0e78a46aad0c46965c075
+ size 4999688704
model-00016-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd2b0a5d69ed779ff34245aed3df77034a1379704bf221d4605d02bc5caa00c6
+ size 4997610072
model-00017-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:467a4f7c307727ad0d3a85d935a1c668194eb2e00ce42bdf4588bb330527cdfd
+ size 4997610136
model-00018-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a47cf29fbea4126ff8887e6f68cb0e62af97254b909a0f4401f6bd8481e08d07
+ size 4997610256
model-00019-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:10a31742c91488120a2a4e1b271f995aa8c82b0103432cce3969e136b4647121
+ size 4999688760
model-00020-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af118dac4de1d28574eec1e7b24a214e2926f13a87d892a90899c7c514e44722
+ size 4997610072
model-00021-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecd0f8e65281bc7d7dba9d94ac7e2fd27dc502264e2997c9f834b8449fc761a1
+ size 4997610096
model-00022-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2605f7216db29acd8afb54e78db105d144ea9c9c393f5d6fbf172d4ad106a47b
+ size 4997610256
model-00023-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0ef4570f0ae0d4643b2ae49e175515d5ecdcc9a711249c7848b23e39829b457
+ size 4999688800
model-00024-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:43f8b1f2ca4e94c50d810c099b4548a28bf3793e81af89353a0ef935150f7eb2
+ size 4997610072
model-00025-of-00042.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2884f954db3b69f665e4808383eb3c84a3a963713b26cf262c326d3cea4f75ef
+ size 4997610072
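Only the first 25 of the 42 main-model shards are visible in this truncated view, each just under the 5 GB default shard size. Back-of-the-envelope, and assuming the remaining shards are similar, that puts the full checkpoint around 210 GB, in the range expected for a roughly 100B-parameter model stored in bfloat16:

```python
# Rough estimate only: the sizes of shards 26-42 are not shown in this view.
shards, approx_shard_bytes = 42, 5.0e9
total_bytes = shards * approx_shard_bytes  # ~2.1e11 bytes (~210 GB)
approx_params = total_bytes / 2            # bfloat16: 2 bytes per parameter
print(f"~{total_bytes / 1e9:.0f} GB, ~{approx_params / 1e9:.0f}B parameters")
```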