LongCat0830 and wjn1996 committed
Commit 7c9aad1 · verified · 1 parent: 55f5777

Update configuration_longcat_flash.py (#5)


- Update configuration_longcat_flash.py (4357d408aa0bfbac7d38b5cea2bda88745a338b5)


Co-authored-by: WangJianing <[email protected]>

Files changed (1)
  1. configuration_longcat_flash.py +2 -3
configuration_longcat_flash.py CHANGED
@@ -1,4 +1,3 @@
-
 """LongcatFlash model configuration"""
 
 from transformers.configuration_utils import PretrainedConfig
@@ -53,7 +52,7 @@ class LongcatFlashConfig(PretrainedConfig):
             Dimension of the value heads.
         qk_nope_head_dim (`int`, *optional*, defaults to 128):
             Dimension of the query/key heads that don't use rotary position embeddings.
-        norm_topk_prob (`bool`, *optional*, defaults to `True`):
+        norm_topk_prob (`bool`, *optional*, defaults to `False`):
             Whether to normalize the weights of the routed experts.
         hidden_act (`str` or `function`, *optional*, defaults to `"silu"`):
             The non-linear activation function (function or string) in the decoder.
@@ -213,4 +212,4 @@ class LongcatFlashConfig(PretrainedConfig):
         return self.num_layers
 
 
-__all__ = ["LongcatFlashConfig"]
+__all__ = ["LongcatFlashConfig"]
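
The substantive change here is the documented default of `norm_topk_prob` flipping from `True` to `False`. In top-k MoE routers this flag usually controls whether the selected experts' gate probabilities are renormalized to sum to 1 per token. The modeling code is not part of this diff, so the following is a minimal generic sketch of that behavior, not the actual LongcatFlash router; the function name, tensor shapes, and `top_k` value are illustrative assumptions.

```python
import torch

def route_tokens(router_logits: torch.Tensor, top_k: int, norm_topk_prob: bool):
    # Generic top-k MoE routing sketch (NOT the actual LongcatFlash router).
    # router_logits: (num_tokens, num_experts) raw gate scores.
    probs = torch.softmax(router_logits, dim=-1)
    topk_probs, topk_idx = probs.topk(top_k, dim=-1)
    if norm_topk_prob:
        # True: renormalize so each token's k expert weights sum to 1.
        topk_probs = topk_probs / topk_probs.sum(dim=-1, keepdim=True)
    # False (the documented default after this commit): keep the raw
    # top-k probabilities, whose per-token sum is generally < 1.
    return topk_idx, topk_probs

logits = torch.randn(4, 8)                        # 4 tokens, 8 experts
idx, weights = route_tokens(logits, top_k=2, norm_topk_prob=False)
print(weights.sum(dim=-1))                        # < 1 per token without normalization
```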
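
A quick way to check the default after pulling this change, assuming the file is importable as a standalone module and that the constructor's other arguments all have defaults (neither is shown in this diff):

```python
from configuration_longcat_flash import LongcatFlashConfig

config = LongcatFlashConfig()   # hypothetical: relies on all-default arguments
print(config.norm_topk_prob)    # expected: False, per the updated docstring
```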