
Commit b33aced
allow for turning off the classification head, or to return embeddings early

lucidrains committed Sep 13, 2021
1 parent 1881582 commit b33aced
Showing 2 changed files with 20 additions and 5 deletions.
23 changes: 19 additions & 4 deletions perceiver_pytorch/perceiver_pytorch.py
@@ -6,6 +6,7 @@
 import torch.nn.functional as F

 from einops import rearrange, repeat
+from einops.layers.torch import Reduce

 # helpers

@@ -141,7 +142,8 @@ def __init__(
         ff_dropout = 0.,
         weight_tie_layers = False,
         fourier_encode_data = True,
-        self_per_cross_attn = 1
+        self_per_cross_attn = 1,
+        final_classifier_head = True
     ):
         """The shape of the final attention mechanism will be:
         depth * (cross attention -> self_per_cross_attn * self attention)
@@ -169,6 +171,7 @@ def __init__(
                 the input_axis given. defaults to True, but can be turned off
                 if you are fourier encoding the data yourself.
             self_per_cross_attn: Number of self attention blocks per cross attn.
+            final_classifier_head: mean pool and project embeddings to number of classes (num_classes) at the end
         """
         super().__init__()
         self.input_axis = input_axis
@@ -209,11 +212,17 @@ def __init__(
             ]))

         self.to_logits = nn.Sequential(
+            Reduce('b n d -> b d', 'mean'),
             nn.LayerNorm(latent_dim),
             nn.Linear(latent_dim, num_classes)
-        )
+        ) if final_classifier_head else nn.Identity()

-    def forward(self, data, mask = None):
+    def forward(
+        self,
+        data,
+        mask = None,
+        return_embeddings = False
+    ):
         b, *axis, _, device = *data.shape, data.device
         assert len(axis) == self.input_axis, 'input data must have the right number of axis'
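
The inline mean pool that used to live in forward (removed in the next hunk) moves into to_logits as einops' Reduce layer, which packages the reduction as an nn.Module so it can sit inside nn.Sequential. A minimal equivalence sketch, with illustrative shapes:

    import torch
    from einops.layers.torch import Reduce

    x = torch.randn(2, 256, 512)           # (batch, num_latents, latent_dim)
    pool = Reduce('b n d -> b d', 'mean')  # mean over the latent axis, as a module
    assert torch.allclose(pool(x), x.mean(dim = -2))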

@@ -244,5 +253,11 @@ def forward(self, data, mask = None):
             x = self_attn(x) + x
             x = self_ff(x) + x

-        x = x.mean(dim = -2)
+        # allow for fetching embeddings
+
+        if return_embeddings:
+            return x
+
+        # to logits
+
         return self.to_logits(x)
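
Taken together, the two new options can be used like this. A usage sketch, assuming constructor arguments along the lines of the repository README (the values here are illustrative):

    import torch
    from perceiver_pytorch import Perceiver

    model = Perceiver(
        input_channels = 3,            # channels-last image input
        input_axis = 2,                # two spatial axes (height, width)
        num_freq_bands = 6,
        max_freq = 10.,
        depth = 6,
        num_latents = 256,
        latent_dim = 512,
        num_classes = 1000,
        final_classifier_head = False  # to_logits becomes nn.Identity()
    )

    img = torch.randn(1, 224, 224, 3)

    emb = model(img, return_embeddings = True)  # latents before to_logits: (1, 256, 512)
    out = model(img)                            # head disabled, so nn.Identity() returns the same latents

With final_classifier_head = True (the default), the second call would instead return class logits of shape (1, 1000).
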
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'perceiver-pytorch',
   packages = find_packages(),
-  version = '0.7.0',
+  version = '0.7.1',
   license='MIT',
   description = 'Perceiver - Pytorch',
   author = 'Phil Wang',
