From 92f65c05e761926cc9ceb1b3ef457a4f6db0fb2d Mon Sep 17 00:00:00 2001
From: Caroline DE POURTALES <cdepourt@montana.irit.fr>
Date: Mon, 30 May 2022 16:02:53 +0200
Subject: [PATCH] correction on positional encoding

---
 Configuration/config.ini   | 1 -
 Linker/PositionEncoding.py | 6 +++---
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/Configuration/config.ini b/Configuration/config.ini
index ed41d1e..53d2d9a 100644
--- a/Configuration/config.ini
+++ b/Configuration/config.ini
@@ -3,7 +3,6 @@ transformers = 4.16.2
 
 [DATASET_PARAMS]
 symbols_vocab_size=26
-atom_vocab_size=17
 max_len_sentence=266
 max_atoms_in_sentence=1250
 max_atoms_in_one_type=510
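
(Reviewer note, not part of the patch: this hunk drops the atom_vocab_size key
from DATASET_PARAMS, so any code that still reads it will fail. A minimal
sketch of loading the remaining parameters with Python's standard
configparser; only the path, section name, keys, and values come from the
hunk above, the rest is illustrative.)

    import configparser

    # Load the dataset parameters as they exist after this patch.
    config = configparser.ConfigParser()
    config.read('Configuration/config.ini')
    dataset = config['DATASET_PARAMS']

    symbols_vocab_size = int(dataset['symbols_vocab_size'])        # 26
    max_len_sentence = int(dataset['max_len_sentence'])            # 266
    max_atoms_in_sentence = int(dataset['max_atoms_in_sentence'])  # 1250
    max_atoms_in_one_type = int(dataset['max_atoms_in_one_type'])  # 510

    # atom_vocab_size was removed by this patch; reading it now
    # raises a KeyError.
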
diff --git a/Linker/PositionEncoding.py b/Linker/PositionEncoding.py
index 0c7d11c..d0d6524 100644
--- a/Linker/PositionEncoding.py
+++ b/Linker/PositionEncoding.py
@@ -11,9 +11,9 @@ class PositionalEncoding(nn.Module):
 
         position = torch.arange(max_len).unsqueeze(1)
         div_term = torch.exp(torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model))
-        pe = torch.zeros(max_len, 1, d_model)
-        pe[:, 0, 0::2] = torch.sin(position * div_term)
-        pe[:, 0, 1::2] = torch.cos(position * div_term)
+        pe = torch.zeros(1, max_len, d_model)
+        pe[0, :, 0::2] = torch.sin(position * div_term)
+        pe[0, :, 1::2] = torch.cos(position * div_term)
         self.register_buffer('pe', pe)
 
     def forward(self, x):
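
(Reviewer note, not part of the patch: the fix moves the sinusoidal buffer
from shape (max_len, 1, d_model) to (1, max_len, d_model), i.e. from
sequence-first to batch-first layout, so pe broadcasts over the batch
dimension of a (batch, seq_len, d_model) input. Below is a minimal sketch of
the corrected module under that assumption; only the lines changed by the
hunk are taken from it, while the constructor signature, defaults, dropout,
and the slicing in forward() are illustrative.)

    import math
    import torch
    from torch import nn

    class PositionalEncoding(nn.Module):
        def __init__(self, d_model, max_len=5000, dropout=0.1):
            super().__init__()
            self.dropout = nn.Dropout(p=dropout)
            position = torch.arange(max_len).unsqueeze(1)
            div_term = torch.exp(torch.arange(0, d_model, 2)
                                 * (-math.log(10000.0) / d_model))
            # Batch-first layout (the change in this patch): pe has shape
            # (1, max_len, d_model), so it broadcasts over the batch
            # dimension of x in forward(). Assumes an even d_model.
            pe = torch.zeros(1, max_len, d_model)
            pe[0, :, 0::2] = torch.sin(position * div_term)
            pe[0, :, 1::2] = torch.cos(position * div_term)
            self.register_buffer('pe', pe)

        def forward(self, x):
            # x: (batch, seq_len, d_model); slice pe to the actual
            # sequence length before adding it.
            x = x + self.pe[:, :x.size(1)]
            return self.dropout(x)

With this layout, a batch-first tensor of shape (batch, seq_len, d_model)
can be passed in directly, matching the batch_first=True convention of
torch.nn.Transformer.
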
-- 
GitLab