Skip to content

Commit

Permalink
fix hf model generate: add forward context args in forward signature
Browse files Browse the repository at this point in the history
  • Loading branch information
Francois Ledoyen authored and Francois Ledoyen committed Feb 17, 2025
1 parent a5160d3 commit 0acaf0a
Show file tree
Hide file tree
Showing 2 changed files with 28 additions and 1 deletion.
26 changes: 26 additions & 0 deletions src/adapters/context.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import functools
import inspect
import threading
from typing import ContextManager

Expand Down Expand Up @@ -121,6 +122,31 @@ def _call_forward(self, model, f, *args, **kwargs):

return results

@classmethod
def add_contex_args_in_signature(cls, f):
    """Return a copy of *f*'s signature extended with the forward-context args.

    Each name in ``cls.context_args`` is added as a ``POSITIONAL_OR_KEYWORD``
    parameter with default ``None``, preserving the declared order. Names that
    already exist in *f*'s signature are skipped. The new parameters are
    inserted before any ``*args`` / keyword-only / ``**kwargs`` parameter so
    the resulting parameter order remains valid for ``Signature.replace``.

    NOTE(review): "contex" is a typo for "context", but the name is referenced
    as-is by callers (e.g. wrappers/model.py), so it is kept for compatibility.

    Args:
        f: The callable (e.g. a model's ``forward``) whose signature to extend.

    Returns:
        inspect.Signature: The extended signature (the original is unchanged).
    """
    old_signature = inspect.signature(f)
    params = list(old_signature.parameters.values())
    # Insertion point: just before the first *args / keyword-only / **kwargs
    # parameter (a POSITIONAL_OR_KEYWORD param may not follow any of those);
    # if none are present, append at the end.
    barrier_kinds = (
        inspect.Parameter.VAR_POSITIONAL,
        inspect.Parameter.KEYWORD_ONLY,
        inspect.Parameter.VAR_KEYWORD,
    )
    i = next(
        (idx for idx, param in enumerate(params) if param.kind in barrier_kinds),
        len(params),
    )
    for name in cls.context_args:
        # Compare by name: full Parameter equality would miss an existing
        # parameter with the same name but a different kind/default, which
        # would make Signature.replace fail on the duplicate name.
        if name in old_signature.parameters:
            continue
        params.insert(
            i, inspect.Parameter(name, inspect.Parameter.POSITIONAL_OR_KEYWORD, default=None)
        )
        i += 1  # advance so context_args keep their declared order
    # we can now build the signature for the wrapper function
    return old_signature.replace(parameters=params)

@classmethod
def wrap_base(cls, f):

Expand Down
3 changes: 2 additions & 1 deletion src/adapters/wrappers/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from torch import nn

from adapters.context import ForwardContext
from transformers import PreTrainedModel
from transformers.models.auto.auto_factory import getattribute_from_module
from transformers.models.auto.configuration_auto import model_type_to_module_name
Expand Down Expand Up @@ -80,7 +81,7 @@ def init(model: PreTrainedModel, adapters_config: Optional[ModelAdaptersConfig]
base_model = getattr(model, model.base_model_prefix)
if isinstance(base_model, ModelAdaptersMixin):
# HACK to preserve original forward method signature (e.g. for Trainer label names)
temp_signature = inspect.signature(model.forward.__func__)
temp_signature = ForwardContext.add_contex_args_in_signature(model.forward.__func__)
# Create new wrapper model class
model_class_name = model.__class__.__name__
model_class = type(
Expand Down

0 comments on commit 0acaf0a

Please sign in to comment.