Skip to content

Commit

Permalink
No longer need the unexpected-keys hook (`update_unexpected_keys`)
Browse files Browse the repository at this point in the history
  • Loading branch information
Satrat committed Sep 3, 2024
1 parent 1c3ad5c commit aa1a4f9
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 11 deletions.
1 change: 0 additions & 1 deletion src/transformers/modeling_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4182,7 +4182,6 @@ def _fix_key(key):
for pat in cls._keys_to_ignore_on_load_unexpected:
unexpected_keys = [k for k in unexpected_keys if re.search(pat, k) is None]
if hf_quantizer is not None:
unexpected_keys = hf_quantizer.update_unexpected_keys(model, unexpected_keys, prefix)
missing_keys = hf_quantizer.update_missing_keys(model, missing_keys, prefix)

# retrieve weights on meta device and put them back on CPU.
Expand Down
10 changes: 0 additions & 10 deletions src/transformers/quantizers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,16 +99,6 @@ def adjust_target_dtype(self, torch_dtype: "torch.dtype") -> "torch.dtype":
"""
return torch_dtype

def update_unexpected_keys(self, model, unexpected_keys: List[str], prefix: str) -> List[str]:
"""
Override this method if you want to adjust the `unexpected_keys`.
Args:
unexpected_keys (`List[str]`, *optional*):
The list of unexpected keys in the state dict of the model compared to the checkpoint
"""
return unexpected_keys

def update_missing_keys(self, model, missing_keys: List[str], prefix: str) -> List[str]:
"""
Override this method if you want to adjust the `missing_keys`.
Expand Down

0 comments on commit aa1a4f9

Please sign in to comment.