diff --git a/src/adapters/model_mixin.py b/src/adapters/model_mixin.py
index 4a6344c8a..1c7b803bf 100644
--- a/src/adapters/model_mixin.py
+++ b/src/adapters/model_mixin.py
@@ -1526,7 +1526,7 @@ def merge_adapter(self, name: str):
             name (str): LoRA module to merge.
         """
         with ForwardContext(self, torch.empty(0, 1)):
-            #check if there are shared parameters between adapter weights
+            # check if there are shared parameters between adapter weights
             if self.base_model.shared_parameters:
                 ForwardContext.get_context().shared_parameters = self.base_model.shared_parameters
 
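
For reference, a minimal usage sketch of the method touched by this hunk, assuming the AdapterHub `adapters` library on top of a Hugging Face Transformers model; the checkpoint and adapter name below are illustrative and not taken from this diff:

import adapters
from adapters import LoRAConfig
from transformers import AutoModel

# Illustrative checkpoint; any model supported by the adapters library works.
model = AutoModel.from_pretrained("bert-base-uncased")
adapters.init(model)  # attach the adapter mixins (including merge_adapter) to the model

model.add_adapter("my_lora", config=LoRAConfig())
model.merge_adapter("my_lora")  # fold the LoRA weights into the base model weights
model.reset_adapter()           # undo the merge if the original weights are needed again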