-
Notifications
You must be signed in to change notification settings - Fork 1
/
decoder.py
24 lines (22 loc) · 820 Bytes
/
decoder.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
from torch.nn import Module, ModuleList, Linear, SiLU
from torch import Tensor
class Decoder(Module):
    """MLP decoder: maps 4-dim input features to 3-dim outputs.

    Architecture: Linear(4 -> inner_dim) -> SiLU -> `hidden_layer_count`
    stages of [Linear(inner_dim -> inner_dim) -> SiLU] -> Linear(inner_dim -> 3).
    """

    in_proj: Linear
    hidden_layers: ModuleList
    out_proj: Linear

    def __init__(self, hidden_layer_count: int = 1, inner_dim: int = 12) -> None:
        """
        Args:
            hidden_layer_count: number of (Linear, SiLU) hidden stages.
            inner_dim: width of the hidden representation.
        """
        super().__init__()
        self.in_proj = Linear(4, inner_dim)
        make_nonlin = SiLU
        self.nonlin = make_nonlin()
        # BUGFIX: the original comprehension had its loops reversed
        # (`for layer in (...) for _ in range(...)`), which built exactly ONE
        # Linear and ONE SiLU and repeated the *same* Linear object
        # hidden_layer_count times — shared weights and grouped order
        # [Linear, Linear, ..., SiLU, SiLU, ...]. Construct fresh modules per
        # stage so the stack alternates Linear -> SiLU with independent weights.
        self.hidden_layers = ModuleList([
            layer
            for _ in range(hidden_layer_count)
            for layer in (Linear(inner_dim, inner_dim), make_nonlin())
        ])
        self.out_proj = Linear(inner_dim, 3)

    def forward(self, sample: Tensor) -> Tensor:
        """Decode a (*, 4) tensor to a (*, 3) tensor."""
        sample = self.in_proj(sample)
        sample = self.nonlin(sample)
        for layer in self.hidden_layers:
            # Call the module (not .forward) so registered hooks still fire.
            sample = layer(sample)
        return self.out_proj(sample)