fix the embedding saving for adaption prompt (#1314)
* fix the embedding saving for adaption prompt

* fix

* automate setting `save_embedding_layers` when embedding layer is resized during finetuning

* fix

* address comment

Co-Authored-By: Benjamin Bossan <BenjaminBossan@users.noreply.github.com>

* oops

---------

Co-authored-by: Benjamin Bossan <BenjaminBossan@users.noreply.github.com>
pacman100 and BenjaminBossan authored Jan 3, 2024
1 parent 2a0fb71 commit cbf346d
Showing 1 changed file with 15 additions and 4 deletions.
19 changes: 15 additions & 4 deletions src/peft/utils/save_and_load.py
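The third bullet of the commit message refers to the common fine-tuning workflow in which new tokens are added and the embedding matrix is resized, so the model's vocab_size no longer matches the base config on the Hub. A minimal sketch of that scenario (the model id and added token are illustrative placeholders, not part of this commit):

# Sketch of the setup that the new "auto" check targets; the model id and the
# added pad token are placeholders.
from transformers import AutoModelForCausalLM, AutoTokenizer

base_model_id = "meta-llama/Llama-2-7b-hf"  # hypothetical base model
tokenizer = AutoTokenizer.from_pretrained(base_model_id)
model = AutoModelForCausalLM.from_pretrained(base_model_id)

# Adding a token and resizing updates model.config.vocab_size, so it diverges
# from the vocab_size stored in the base model's config on the Hub; that is
# the mismatch get_peft_model_state_dict now checks for when
# save_embedding_layers="auto".
tokenizer.add_special_tokens({"pad_token": "<pad>"})
model.resize_token_embeddings(len(tokenizer))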
@@ -20,6 +20,7 @@
 from huggingface_hub import file_exists, hf_hub_download
 from huggingface_hub.utils import EntryNotFoundError
 from safetensors.torch import load_file as safe_load_file
+from transformers import AutoConfig
 
 from .other import EMBEDDING_LAYER_NAMES, SAFETENSORS_WEIGHTS_NAME, WEIGHTS_NAME, infer_device
 from .peft_types import PeftType
@@ -33,7 +34,7 @@ def has_valid_embedding_base_layer(layer):
 def get_embedding_layer_name(model, layer, is_prompt_learning):
     """Get the name of the embedding module for a given layer."""
     for name, module in model.named_modules():
-        if (is_prompt_learning and module == layer) or module == layer.base_layer:
+        if (is_prompt_learning and module == layer) or module == getattr(layer, "base_layer", None):
             return name
     return None
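Why the getattr form: for an adaption-prompt model, config.is_prompt_learning is False, so the comparison fell through to layer.base_layer, but the layer returned by get_input_embeddings() is a plain nn.Embedding without a base_layer attribute and the lookup raised AttributeError. A minimal illustration of the pattern (not part of the patch):

# Illustrative only: a bare embedding, such as get_input_embeddings() returns
# for an adaption-prompt model, has no base_layer attribute.
import torch.nn as nn

layer = nn.Embedding(32000, 4096)
# layer.base_layer                         # would raise AttributeError
base = getattr(layer, "base_layer", None)  # safely returns None instead
print(base is None)                        # True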

@@ -131,13 +132,23 @@ def get_peft_model_state_dict(
         warnings.warn("Setting `save_embedding_layers` to `True` as embedding layers found in `target_modules`.")
         save_embedding_layers = True
     elif save_embedding_layers == "auto":
-        save_embedding_layers = False
+        vocab_size = getattr(getattr(model, "config", None), "vocab_size", None)
+        model_id = getattr(config, "base_model_name_or_path", None)
+        # check if the vocab size of the base model is different from the vocab size of the finetuned model
+        if vocab_size and model_id and (vocab_size != AutoConfig.from_pretrained(model_id).vocab_size):
+            warnings.warn(
+                "Setting `save_embedding_layers` to `True` as the embedding layer has been resized during finetuning."
+            )
+            save_embedding_layers = True
+        else:
+            save_embedding_layers = False
 
     if save_embedding_layers and hasattr(model, "get_input_embeddings"):
+        is_prompt_learning_method = config.is_prompt_learning or config.peft_type == PeftType.ADAPTION_PROMPT
         for layer in [model.get_input_embeddings(), model.get_output_embeddings()]:
-            if config.is_prompt_learning or has_valid_embedding_base_layer(layer):
+            if is_prompt_learning_method or has_valid_embedding_base_layer(layer):
                 # support from version >= 0.6.2
-                embedding_module_name = get_embedding_layer_name(model, layer, config.is_prompt_learning)
+                embedding_module_name = get_embedding_layer_name(model, layer, is_prompt_learning_method)
                 if embedding_module_name:
                     to_return.update({k: v for k, v in state_dict.items() if embedding_module_name in k})
     elif save_embedding_layers:
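With both changes in place, saving an adaption-prompt adapter on a model whose embeddings were resized should pick up the embedding weights automatically. A hypothetical continuation of the sketch shown before the diff (adapter hyperparameters are placeholders, and save_embedding_layers="auto" is the default):

# Hypothetical usage; continues the resize sketch above.
from peft import AdaptionPromptConfig, get_peft_model
from peft.utils import get_peft_model_state_dict

peft_config = AdaptionPromptConfig(adapter_len=10, adapter_layers=30, task_type="CAUSAL_LM")  # placeholder values
peft_model = get_peft_model(model, peft_config)

# ... fine-tuning would happen here ...

# Because model.config.vocab_size differs from the base config, the "auto"
# branch warns and includes the embedding weights in the returned state dict.
state_dict = get_peft_model_state_dict(peft_model, save_embedding_layers="auto")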
