-
Notifications
You must be signed in to change notification settings - Fork 31.3k
[PEFT] Fix PEFT multi adapters support
#26407
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 3 commits
05bbcd7
b2e32a5
d772435
87e304a
3dc62a3
9250ccf
c7ad2b4
9e93a26
f8e435f
a46e13b
4ebe629
9f538ad
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -12,7 +12,7 @@ | |
| # See the License for the specific language governing permissions and | ||
| # limitations under the License. | ||
| import inspect | ||
| from typing import TYPE_CHECKING, Any, Dict, Optional | ||
| from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union | ||
|
|
||
| from ..utils import ( | ||
| check_peft_version, | ||
|
|
@@ -245,20 +245,26 @@ def add_adapter(self, adapter_config, adapter_name: Optional[str] = None) -> Non | |
|
|
||
| self.set_adapter(adapter_name) | ||
|
|
||
| def set_adapter(self, adapter_name: str) -> None: | ||
| def set_adapter(self, adapter_name: Union[List[str], str]) -> None: | ||
| """ | ||
| If you are not familiar with adapters and PEFT methods, we invite you to read more about them on the PEFT | ||
| official documentation: https://huggingface.co/docs/peft | ||
|
|
||
| Sets a specific adapter by forcing the model to use that adapter and disabling the other adapters. | ||
|
|
||
| Args: | ||
| adapter_name (`str`): | ||
| The name of the adapter to set. | ||
| adapter_name (`Union[List[str], str]`): | ||
| The name of the adapter to set. Can be also a list of strings to set multiple adapters. | ||
| """ | ||
| check_peft_version(min_version=MIN_PEFT_VERSION) | ||
| if not self._hf_peft_config_loaded: | ||
| raise ValueError("No adapter loaded. Please load an adapter first.") | ||
| elif isinstance(adapter_name, list): | ||
| for adapter in adapter_name: | ||
|
||
| if adapter not in self.peft_config: | ||
| raise ValueError( | ||
| f"Adapter with name {adapter} not found. Please pass the correct adapter name among {list(self.peft_config.keys())}" | ||
| ) | ||
| elif adapter_name not in self.peft_config: | ||
| raise ValueError( | ||
| f"Adapter with name {adapter_name} not found. Please pass the correct adapter name among {list(self.peft_config.keys())}" | ||
|
|
@@ -270,7 +276,11 @@ def set_adapter(self, adapter_name: str) -> None: | |
|
|
||
| for _, module in self.named_modules(): | ||
| if isinstance(module, BaseTunerLayer): | ||
| module.active_adapter = adapter_name | ||
| # For backward compatibility with previous PEFT versions | ||
| if hasattr(module, "set_adapter"): | ||
| module.set_adapter(adapter_name) | ||
| else: | ||
| module.active_adapter = adapter_name | ||
| _adapters_has_been_set = True | ||
|
|
||
| if not _adapters_has_been_set: | ||
|
|
@@ -294,7 +304,11 @@ def disable_adapters(self) -> None: | |
|
|
||
| for _, module in self.named_modules(): | ||
| if isinstance(module, BaseTunerLayer): | ||
| module.disable_adapters = True | ||
| # The recent version of PEFT needs to call `enable_adapters` instead | ||
| if hasattr(module, "enable_adapters"): | ||
| module.enable_adapters(enabled=False) | ||
| else: | ||
| module.disable_adapters = True | ||
|
|
||
| def enable_adapters(self) -> None: | ||
| """ | ||
|
|
@@ -312,14 +326,24 @@ def enable_adapters(self) -> None: | |
|
|
||
| for _, module in self.named_modules(): | ||
| if isinstance(module, BaseTunerLayer): | ||
| module.disable_adapters = False | ||
| # The recent version of PEFT needs to call `enable_adapters` instead | ||
| if hasattr(module, "enable_adapters"): | ||
| module.enable_adapters(enabled=True) | ||
| else: | ||
| module.disable_adapters = False | ||
|
|
||
| def active_adapter(self) -> str: | ||
| def active_adapter(self, return_multi_adapters: bool = True) -> Union[str, List[str]]: | ||
| """ | ||
| If you are not familiar with adapters and PEFT methods, we invite you to read more about them on the PEFT | ||
| official documentation: https://huggingface.co/docs/peft | ||
|
|
||
| Gets the current active adapter of the model. | ||
| Gets the current active adapter of the model. In case of multi-adapter inference (combining multiple adapters | ||
| for inference) returns the list of active adapters so that users can deal with them accordingly. | ||
|
|
||
| Args: | ||
| return_multi_adapters (`bool`, *optional*, defaults to `True`): | ||
| Whether to return a list of active adapters or not. If `False`, only the first adapter is returned. If | ||
| `True`, returns the list of active adapters. | ||
|
||
| """ | ||
| check_peft_version(min_version=MIN_PEFT_VERSION) | ||
|
|
||
|
|
@@ -333,7 +357,16 @@ def active_adapter(self) -> str: | |
|
|
||
| for _, module in self.named_modules(): | ||
| if isinstance(module, BaseTunerLayer): | ||
| return module.active_adapter | ||
| active_adapter = module.active_adapter | ||
| if isinstance(active_adapter, list) and not return_multi_adapters: | ||
| # In case the adapter name is a list (multiple adapters), we only consider the first one | ||
| active_adapter = active_adapter[0] | ||
|
|
||
| logger.warning( | ||
| "Multiple adapters detected, we will only consider the first adapter. If you want to get all active adapters, " | ||
| "call `active_adapter(return_multi_adapters=True)` instead." | ||
| ) | ||
| return active_adapter | ||
|
|
||
| def get_adapter_state_dict(self, adapter_name: Optional[str] = None) -> dict: | ||
| """ | ||
|
|
@@ -355,7 +388,7 @@ def get_adapter_state_dict(self, adapter_name: Optional[str] = None) -> dict: | |
| from peft import get_peft_model_state_dict | ||
|
|
||
| if adapter_name is None: | ||
| adapter_name = self.active_adapter() | ||
| adapter_name = self.active_adapter(return_multi_adapters=False) | ||
|
|
||
| adapter_state_dict = get_peft_model_state_dict(self, adapter_name=adapter_name) | ||
| return adapter_state_dict | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -2006,7 +2006,17 @@ def save_pretrained( | |
| peft_state_dict[f"base_model.model.{key}"] = value | ||
| state_dict = peft_state_dict | ||
|
|
||
| current_peft_config = self.peft_config[self.active_adapter()] | ||
| active_adapter = self.active_adapter() | ||
|
|
||
| if isinstance(active_adapter, list): | ||
| if len(active_adapter) > 1: | ||
| logger.warning( | ||
| "Multiple active adapters detected, will only consider the first active adapter. In order to save them all, please iteratively call `set_adapter()` on each" | ||
| " adapter name and save them one by one manually. " | ||
| ) | ||
| active_adapter = active_adapter[0] | ||
|
|
||
| current_peft_config = self.peft_config[active_adapter] | ||
|
||
| current_peft_config.save_pretrained(save_directory) | ||
|
|
||
| # Save the model | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -265,17 +265,31 @@ def test_peft_add_multi_adapter(self): | |
| _ = model.generate(input_ids=dummy_input) | ||
|
|
||
| model.set_adapter("default") | ||
| self.assertTrue(model.active_adapter() == "default") | ||
| self.assertTrue(model.active_adapter() == ["default"]) | ||
|
||
| self.assertTrue(model.active_adapter(return_multi_adapters=False) == "default") | ||
|
||
|
|
||
| model.set_adapter("adapter-2") | ||
| self.assertTrue(model.active_adapter() == "adapter-2") | ||
| self.assertTrue(model.active_adapter() == ["adapter-2"]) | ||
| self.assertTrue(model.active_adapter(return_multi_adapters=False) == "adapter-2") | ||
|
|
||
| # Logits comparison | ||
| self.assertFalse( | ||
| torch.allclose(logits_adapter_1.logits, logits_adapter_2.logits, atol=1e-6, rtol=1e-6) | ||
| ) | ||
| self.assertFalse(torch.allclose(logits_original_model, logits_adapter_2.logits, atol=1e-6, rtol=1e-6)) | ||
|
|
||
| model.set_adapter(["adapter-2", "default"]) | ||
| self.assertTrue(model.active_adapter() == ["adapter-2", "default"]) | ||
|
||
|
|
||
| logits_adapter_mixed = model(dummy_input) | ||
| self.assertFalse( | ||
| torch.allclose(logits_adapter_1.logits, logits_adapter_mixed.logits, atol=1e-6, rtol=1e-6) | ||
| ) | ||
|
|
||
| self.assertFalse( | ||
| torch.allclose(logits_adapter_2.logits, logits_adapter_mixed.logits, atol=1e-6, rtol=1e-6) | ||
| ) | ||
|
|
||
| @require_torch_gpu | ||
| def test_peft_from_pretrained_kwargs(self): | ||
| """ | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Let's do the same in diffusers :-) https://github.com/huggingface/diffusers/pull/5151/files#r1338774571