Commit 92f65c05 authored by Caroline DE POURTALES

correction on positional enco

parent 5bb83ca8
2 merge requests: !6 Linker with transformer, !5 Linker with transformer
@@ -3,7 +3,6 @@ transformers = 4.16.2
[DATASET_PARAMS]
symbols_vocab_size=26
atom_vocab_size=17
max_len_sentence=266
max_atoms_in_sentence=1250
max_atoms_in_one_type=510
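
The hunk above edits an INI-style [DATASET_PARAMS] section (its header shows the section shrinking by one line). As a rough, hypothetical sketch of how such a section is typically consumed, the values could be read with Python's configparser; the file name config.ini and the variable names below are assumptions, not taken from the repository:

from configparser import ConfigParser

# Hypothetical loader for a [DATASET_PARAMS] section like the one above.
config = ConfigParser()
config.read("config.ini")  # assumed file name

params = config["DATASET_PARAMS"]
symbols_vocab_size = params.getint("symbols_vocab_size")        # e.g. 26
atom_vocab_size = params.getint("atom_vocab_size")              # e.g. 17
max_atoms_in_sentence = params.getint("max_atoms_in_sentence")  # e.g. 1250
max_atoms_in_one_type = params.getint("max_atoms_in_one_type")  # e.g. 510
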
@@ -11,9 +11,9 @@ class PositionalEncoding(nn.Module):
         position = torch.arange(max_len).unsqueeze(1)
         div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model))
-        pe = torch.zeros(max_len, 1, d_model)
-        pe[:, 0, 0::2] = torch.sin(position * div_term)
-        pe[:, 0, 1::2] = torch.cos(position * div_term)
+        pe = torch.zeros(1, max_len, d_model)
+        pe[0, :, 0::2] = torch.sin(position * div_term)
+        pe[0, :, 1::2] = torch.cos(position * div_term)
         self.register_buffer('pe', pe)
     def forward(self, x):
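
For context, here is a self-contained sketch of a batch-first PositionalEncoding module consistent with the corrected hunk above; the constructor signature, the dropout layer, and the body of forward are assumptions, since the diff only shows part of the class:

import math

import torch
import torch.nn as nn


class PositionalEncoding(nn.Module):
    """Sinusoidal positional encoding for batch-first input of shape (batch, seq_len, d_model)."""

    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super().__init__()
        self.dropout = nn.Dropout(p=dropout)
        position = torch.arange(max_len).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model))
        # (1, max_len, d_model): the leading singleton dimension broadcasts over the batch.
        pe = torch.zeros(1, max_len, d_model)
        pe[0, :, 0::2] = torch.sin(position * div_term)
        pe[0, :, 1::2] = torch.cos(position * div_term)
        self.register_buffer('pe', pe)

    def forward(self, x):
        # x: (batch, seq_len, d_model); add the encoding for the first seq_len positions.
        x = x + self.pe[:, :x.size(1)]
        return self.dropout(x)

With the old (max_len, 1, d_model) buffer, adding pe to a batch-first tensor would broadcast along the wrong axis; the (1, max_len, d_model) layout broadcasts the same encodings across every sequence in the batch.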