import torch
from torch import nn


class ActivatorGatingUnit(nn.Module):
    """Gating unit: two parallel projections of the input are fused by an
    element-wise product and projected back to the model dimension."""

    def __init__(self, dim, hidden_dim):
        super().__init__()
        self.proj_1 = nn.Linear(dim, hidden_dim)
        self.proj_2 = nn.Linear(dim, hidden_dim)
        self.proj_3 = nn.Linear(hidden_dim, dim)
        self.gelu = nn.GELU()
        self.norm = nn.LayerNorm(hidden_dim)

    def forward(self, x):
        # Both branches see the full input tensor.
        u, v = x, x

        # Branch 1: project, apply GELU, then normalize.
        u = self.proj_1(u)
        u = self.gelu(u)
        u = self.norm(u)

        # Branch 2: project, then normalize (the LayerNorm is shared with branch 1).
        v = self.proj_2(v)
        v = self.norm(v)

        # Element-wise gating, then project back to the model dimension.
        g = u * v
        out = self.proj_3(g)
        return out


class ActivatorBlock(nn.Module):
    def __init__(self, d_model, d_ffn, dropout):
        super().__init__()

        self.norm = nn.LayerNorm(d_model)
        self.actgu = ActivatorGatingUnit(d_model, d_ffn)
        # Note: `dropout` is accepted for interface consistency but is not applied in this block.

    def forward(self, x):
        # Pre-norm residual block: normalize, gate, then add the skip connection.
        residual = x
        x = self.norm(x)
        x = self.actgu(x)
        out = x + residual
        return out


class ACTIVATOR(nn.Module):
    def __init__(self, d_model, d_ffn, num_layers, dropout):
        super().__init__()

        # Stack of identical Activator blocks applied in sequence.
        self.model = nn.Sequential(
            *[ActivatorBlock(d_model, d_ffn, dropout) for _ in range(num_layers)]
        )

    def forward(self, x):
        return self.model(x)
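

# Minimal usage sketch, kept as a script entry point. The hyperparameter values
# and tensor shape below are illustrative assumptions, not taken from any
# original training configuration.
if __name__ == "__main__":
    model = ACTIVATOR(d_model=64, d_ffn=128, num_layers=2, dropout=0.1)
    x = torch.randn(8, 16, 64)  # (batch, sequence length, d_model)
    out = model(x)
    print(out.shape)  # expected: torch.Size([8, 16, 64])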