"""Activation modules."""

import torch
import torch.nn as nn


class AGLU(nn.Module):
    """Unified activation function module from https://github.com/kostas1515/AGLU.

    Applies the adaptive parametric activation f(x) = (1 + lambd * exp(-kappa * x)) ** (-1 / lambd)
    elementwise, with learnable parameters lambd and kappa.
    """

    def __init__(self, device=None, dtype=None) -> None:
        """Initialize the Unified activation function with learnable lambd and kappa parameters."""
        super().__init__()
        self.act = nn.Softplus(beta=-1.0)  # Softplus with beta=-1 computes -log(1 + exp(-x))
        self.lambd = nn.Parameter(nn.init.uniform_(torch.empty(1, device=device, dtype=dtype)))  # lambda parameter
        self.kappa = nn.Parameter(nn.init.uniform_(torch.empty(1, device=device, dtype=dtype)))  # kappa parameter

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Compute the forward pass of the Unified activation function."""
        lam = torch.clamp(self.lambd, min=0.0001)  # keep lambd away from zero for numerical stability
        return torch.exp((1 / lam) * self.act((self.kappa * x) - torch.log(lam)))
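
# Minimal usage sketch (illustration only, not part of the module): run AGLU on a
# random tensor to check that the activation applies elementwise; the input shape
# here is an arbitrary assumption.
if __name__ == "__main__":
    act = AGLU()
    x = torch.randn(2, 8)
    print(act(x).shape)  # torch.Size([2, 8])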