From 4fd6d6ddd0969cae6801d38d9274ae138b39ba77 Mon Sep 17 00:00:00 2001
From: "arthur.zucker@gmail.com"
Date: Thu, 25 May 2023 08:52:42 +0000
Subject: [PATCH] nits

---
 src/transformers/models/longformer/modeling_longformer.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/transformers/models/longformer/modeling_longformer.py b/src/transformers/models/longformer/modeling_longformer.py
index 7f088259c8c1e9..cd975380be553b 100755
--- a/src/transformers/models/longformer/modeling_longformer.py
+++ b/src/transformers/models/longformer/modeling_longformer.py
@@ -438,7 +438,6 @@ class LongformerEmbeddings(nn.Module):
     def __init__(self, config):
         super().__init__()
         self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
-        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
         self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)

         # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
@@ -446,8 +445,6 @@ def __init__(self, config):
         self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
         self.dropout = nn.Dropout(config.hidden_dropout_prob)

-        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
-
         self.padding_idx = config.pad_token_id
         self.position_embeddings = nn.Embedding(
             config.max_position_embeddings, config.hidden_size, padding_idx=self.padding_idx
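
For reference, a minimal sketch of how LongformerEmbeddings.__init__ reads once the patch is applied. It is reconstructed only from the hunk context above; lines elided between the two hunks are marked and not filled in.

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)

        # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
        # ... (lines between the two hunks omitted, as they are not shown in the diff)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

        # position_embeddings is now created exactly once, with the padding index set
        self.padding_idx = config.pad_token_id
        self.position_embeddings = nn.Embedding(
            config.max_position_embeddings, config.hidden_size, padding_idx=self.padding_idx
        )

The net effect of the three deletions is that the duplicate position_embeddings definition (without padding_idx) and the unused position_embedding_type attribute are removed, leaving only the padding-aware definition at the end of __init__.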