Fix Github Action πŸ‘·β€β™€οΈ (#404)
Fix action πŸ‘·β€β™€οΈ
KarelZe authored Jun 9, 2023
1 parent cdf4819 commit 508cded
Showing 2 changed files with 14 additions and 8 deletions.
21 changes: 14 additions & 7 deletions src/otc/models/fttransformer.py
@@ -523,27 +523,33 @@ def __init__(
         if self.W_out is not None:
             nn.init.zeros_(self.W_out.bias)
 
-    def save_attn(self, attn):
+    def save_attn(self, attn: torch.Tensor) -> None:
         """
-        save attention probabilities tensor.
+        Save attention probabilities tensor.
+
+        Args:
+            attn (torch.Tensor): attention probabilities.
         """
         self.attn = attn
 
     def get_attn(self) -> torch.Tensor:
         """
-        get attention probabilites tensor.
+        Get attention probabilities tensor.
         """
         return self.attn
 
-    def save_attn_gradients(self, attn_gradients):
+    def save_attn_gradients(self, attn_gradients: torch.Tensor) -> None:
         """
-        save attention gradients tensor.
+        Save attention gradients tensor.
+
+        Args:
+            attn_gradients (torch.Tensor): attention gradients.
         """
         self.attn_gradients = attn_gradients
 
     def get_attn_gradients(self) -> torch.Tensor:
         """
-        get attention gradients tensor.
+        Get attention gradients tensor.
         """
         return self.attn_gradients
 
@@ -609,7 +615,8 @@ def forward(
         attention_probs = self.dropout(attention_probs)
 
         self.save_attn(attention_probs)
-        attention_probs.register_hook(self.save_attn_gradients)
+        if attention_probs.requires_grad:
+            attention_probs.register_hook(self.save_attn_gradients)
 
         x = attention_probs @ self._reshape(v)
         x = (
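The guard is the substance of the fix: PyTorch's Tensor.register_hook can only be called on a tensor that requires grad, and raises a RuntimeError ("cannot register a hook on a tensor that doesn't require gradient") otherwise, e.g. at inference under torch.no_grad(). A minimal sketch of the guarded pattern; TinyAttention and its names are illustrative stand-ins, not the repository's code:

import torch
from torch import nn

class TinyAttention(nn.Module):
    # illustrative stand-in for the attention module in fttransformer.py
    def __init__(self) -> None:
        super().__init__()
        self.attn = None
        self.attn_gradients = None

    def save_attn_gradients(self, attn_gradients: torch.Tensor) -> None:
        # stash the gradient flowing back through the attention probabilities
        self.attn_gradients = attn_gradients

    def forward(self, scores: torch.Tensor) -> torch.Tensor:
        attention_probs = torch.softmax(scores, dim=-1)
        self.attn = attention_probs
        # the fix: only register the hook when autograd is actually tracking
        if attention_probs.requires_grad:
            attention_probs.register_hook(self.save_attn_gradients)
        return attention_probs

module = TinyAttention()

# gradient-enabled pass: the hook is registered and fires on backward()
out = module(torch.randn(2, 2, requires_grad=True))
out.sum().backward()
print(module.attn_gradients.shape)  # torch.Size([2, 2])

# inference pass: without the guard, register_hook would raise here
with torch.no_grad():
    module(torch.randn(2, 2))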
1 change: 0 additions & 1 deletion tests/test_fttransformer.py
@@ -58,7 +58,6 @@ def setup(self) -> None:
         self.expected_outputs = (
             torch.randint(0, 1, (self.batch_size, 1)).float().to(device)
         )
-        self.expected_outputs.requires_grad_(True)
 
         # https://github.com/Yura52/rtdl/blob/main/rtdl/modules.py
 
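A plausible reading of the test change: with hook registration now conditional on requires_grad, the test no longer needs to force gradients onto the expected outputs, and loss targets never need requires_grad in the first place; only model outputs do. A short sketch with illustrative names, not the repository's test code:

import torch
import torch.nn.functional as F

# targets for a loss do not need requires_grad; only the model output does.
logits = torch.randn(4, 1, requires_grad=True)   # stands in for a model output
targets = torch.randint(0, 2, (4, 1)).float()    # requires_grad stays False
loss = F.binary_cross_entropy_with_logits(logits, targets)
loss.backward()
print(logits.grad.shape)  # torch.Size([4, 1])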
