diff --git a/layers/Embed.py b/layers/Embed.py
index 977e25568..6bdef6d57 100644
--- a/layers/Embed.py
+++ b/layers/Embed.py
@@ -10,7 +10,7 @@ def __init__(self, d_model, max_len=5000):
         super(PositionalEmbedding, self).__init__()
         # Compute the positional encodings once in log space.
         pe = torch.zeros(max_len, d_model).float()
-        pe.require_grad = False
+        pe.requires_grad = False
 
         position = torch.arange(0, max_len).float().unsqueeze(1)
         div_term = (torch.arange(0, d_model, 2).float()
@@ -47,7 +47,7 @@ def __init__(self, c_in, d_model):
         super(FixedEmbedding, self).__init__()
 
         w = torch.zeros(c_in, d_model).float()
-        w.require_grad = False
+        w.requires_grad = False
 
         position = torch.arange(0, c_in).float().unsqueeze(1)
         div_term = (torch.arange(0, d_model, 2).float()
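
Context for the change (not part of the patch): `require_grad` is not a real `torch.Tensor` attribute, so the old assignment only attached an unused Python attribute and never touched autograd; the tensor stayed at its default `requires_grad=False` regardless. The corrected spelling sets the actual autograd flag. A minimal sketch of the difference, with a throwaway tensor name chosen for illustration:

    import torch

    t = torch.zeros(3, 4)

    t.require_grad = True    # typo: just adds an arbitrary attribute, autograd ignores it
    print(t.requires_grad)   # False -- the real flag is unchanged

    t.requires_grad = True   # correct attribute: flips the actual autograd flag
    print(t.requires_grad)   # True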