From 3955e990f90861a6c00c3dbf997cde4623fa37d4 Mon Sep 17 00:00:00 2001
From: YuAng
Date: Mon, 16 Aug 2021 14:53:12 +0800
Subject: [PATCH] fix #41

---
 src/loftr/utils/position_encoding.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/loftr/utils/position_encoding.py b/src/loftr/utils/position_encoding.py
index fe83798..4b7e812 100644
--- a/src/loftr/utils/position_encoding.py
+++ b/src/loftr/utils/position_encoding.py
@@ -18,7 +18,7 @@ class PositionEncodingSine(nn.Module):
         pe = torch.zeros((d_model, *max_shape))
         y_position = torch.ones(max_shape).cumsum(0).float().unsqueeze(0)
         x_position = torch.ones(max_shape).cumsum(1).float().unsqueeze(0)
-        div_term = torch.exp(torch.arange(0, d_model//2, 2).float() * (-math.log(10000.0) / d_model//2))
+        div_term = torch.exp(torch.arange(0, d_model//2, 2).float() * (-math.log(10000.0) / (d_model//2)))
         div_term = div_term[:, None, None]  # [C//4, 1, 1]
         pe[0::4, :, :] = torch.sin(x_position * div_term)
         pe[1::4, :, :] = torch.cos(x_position * div_term)
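
Why the parentheses matter: in Python, / and // share the same precedence
and associate left to right, so the removed line parsed as
(-math.log(10000.0) / d_model) // 2, which floors a small negative number
to -1.0 and collapses the sinusoidal frequency schedule. A minimal sketch
of the difference, assuming d_model = 256 (a value chosen here for
illustration, matching LoFTR's coarse feature width):

    import math

    d_model = 256  # assumed for illustration

    # Old expression: / and // associate left to right, so this is
    # (-log(10000) / d_model) // 2, i.e. floor(-0.018) == -1.0.
    buggy = -math.log(10000.0) / d_model // 2
    print(buggy)   # -1.0

    # Patched expression: the divisor is parenthesised, recovering the
    # intended Transformer-style scale -log(10000) / (d_model // 2).
    fixed = -math.log(10000.0) / (d_model // 2)
    print(fixed)   # ~ -0.0720

With the buggy value, div_term decayed as exp(-k) for k = 0, 2, 4, ...,
far steeper than the intended exp(-0.072 * k), so the higher positional
encoding channels were effectively zeroed out.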