from transformers import PretrainedConfig


class InteractionModelATTNConfig(PretrainedConfig):
    """Configuration for the attention-based interaction model."""

    model_type = "dlmberta"

    def __init__(self, attention_dropout=0.2, hidden_dropout=0.2, num_heads=1, **kwargs):
        # Model-specific hyperparameters.
        self.num_heads = num_heads
        self.hidden_dropout = hidden_dropout
        self.attention_dropout = attention_dropout
        # Forward any remaining arguments to the base PretrainedConfig.
        super().__init__(**kwargs)
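

# A minimal usage sketch (not part of the original file): it shows how this
# config could be instantiated, saved, and reloaded through the standard
# PretrainedConfig API. The directory name is a hypothetical example path.
if __name__ == "__main__":
    config = InteractionModelATTNConfig(num_heads=4, attention_dropout=0.1)
    config.save_pretrained("./dlmberta-config")  # writes config.json to the directory
    reloaded = InteractionModelATTNConfig.from_pretrained("./dlmberta-config")
    print(reloaded.num_heads, reloaded.attention_dropout, reloaded.hidden_dropout)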