Commit
remove checking for base models as it seems ok for them to go through, and the checking might fail anyway
ranchlai committed Jul 28, 2023
1 parent 4aae042 commit 2ef72c1
Showing 1 changed file with 8 additions and 10 deletions.
src/transformers/utils/bitsandbytes.py: 8 additions, 10 deletions
@@ -264,20 +264,18 @@ def get_keys_to_not_convert(model):
     else:
         tied_keys = sum(tied_params, [])
     has_tied_params = len(tied_keys) > 0
-
-    # Check if it is a base model
-    is_base_model = not hasattr(model, model.base_model_prefix)
-
-    # Ignore this for base models (BertModel, GPT2Model, etc.)
-    if (not has_tied_params) and is_base_model:
-        return []
+    if not has_tied_params:
+        # if the model has no tied parameters, we check if the output embeddings are the same as the input embeddings
+        # if they are the same, it just means that the model is using the same weights for the input and output
+        if id(model.get_output_embeddings()) == id(model.get_input_embeddings()):
+            has_tied_params = True
 
     # In addition to tied-parameters, we will also keep the "lm-head" in full precision for numerical stability reasons
     output_embeddings = model.get_output_embeddings()
-    # If no output-embeddings are found, and no tied parameters, we quantize the whole model
+    # If no output-embeddings are found, and no tied parameters, we have to quantize the whole model
     if output_embeddings is None and not has_tied_params:
         logger.warning(
             "No output embeddings and tied parameters found, will quantize the whole model. "
-            "This could lead to numerical instability if word/output embeddings are quantized. "
+            "This might lead to numerical instability if word/output embeddings are quantized. "
             "If this is not intended, please check tied parameters definition in your model architecture "
             "or override `get_output_embeddings()` to return correct output embeddings in your model class."
         )
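For context, the branch added here treats a model as having tied parameters when get_output_embeddings() returns the very same module object as get_input_embeddings(). A minimal sketch of how that id() comparison behaves (the TinyLM class below is a hypothetical toy model, not part of the commit):

    # Minimal sketch, assuming a toy module that ties its input and output
    # embeddings by returning the same nn.Embedding object from both accessors.
    import torch.nn as nn


    class TinyLM(nn.Module):  # hypothetical example, not from the commit
        def __init__(self, vocab_size=100, hidden_size=16):
            super().__init__()
            self.embed = nn.Embedding(vocab_size, hidden_size)

        def get_input_embeddings(self):
            return self.embed

        def get_output_embeddings(self):
            # Returning the same object makes the id() comparison succeed
            return self.embed


    model = TinyLM()
    # Mirrors the check added in this commit:
    print(id(model.get_output_embeddings()) == id(model.get_input_embeddings()))  # True

Note that for a bare base model with no LM head (BertModel, GPT2Model, etc.), get_output_embeddings() typically returns None, so the id() comparison fails and such models now fall through to the warning path and are quantized whole, which is the behavior the commit message describes as acceptable.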

0 comments on commit 2ef72c1
