ankitkushwaha90 committed (verified)
Commit a44ffc6 · 1 Parent(s): 8890885

Update orginal_transformer.py

Files changed (1)
  orginal_transformer.py +1 -1
orginal_transformer.py CHANGED
@@ -47,7 +47,7 @@ class PositionalEncoding(nn.Module):
         super().__init__()
         pe = torch.zeros(max_seq_length, d_model)
         position = torch.arange(0, max_seq_length, dtype=torch.float).unsqueeze(1)
-        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model)
+        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
 
         pe[:, 0::2] = torch.sin(position * div_term)
         pe[:, 1::2] = torch.cos(position * div_term)
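For context, a minimal runnable sketch of the surrounding sinusoidal positional-encoding module with the fix applied. Only the lines shown in the hunk are confirmed by this diff; the constructor signature, the register_buffer call, and the forward method are assumptions based on the standard "Attention Is All You Need" formulation.

import math
import torch
import torch.nn as nn

class PositionalEncoding(nn.Module):
    # Sketch around the fixed line; forward() and register_buffer are
    # assumptions, not part of the diff shown above.
    def __init__(self, d_model, max_seq_length=5000):
        super().__init__()
        pe = torch.zeros(max_seq_length, d_model)
        position = torch.arange(0, max_seq_length, dtype=torch.float).unsqueeze(1)
        # Fixed line: the closing parenthesis makes torch.exp receive the
        # whole scaled-index expression, giving the 1/10000^(2i/d_model) terms.
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))

        pe[:, 0::2] = torch.sin(position * div_term)  # even dimensions
        pe[:, 1::2] = torch.cos(position * div_term)  # odd dimensions
        self.register_buffer("pe", pe.unsqueeze(0))   # shape (1, max_seq_length, d_model)

    def forward(self, x):
        # Add the encoding for the first seq_len positions to the embeddings.
        return x + self.pe[:, : x.size(1)]

As a quick check, PositionalEncoding(d_model=512)(torch.zeros(1, 10, 512)) returns a (1, 10, 512) tensor containing the sinusoidal encodings for the first 10 positions.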