From 09e39741ad0473c46579485073176d7dd3c265fa Mon Sep 17 00:00:00 2001 From: AlphaINF <876066505@qq.com> Date: Fri, 29 Nov 2024 11:55:00 +0800 Subject: [PATCH 1/3] Support downloading LoRA models and private models from ModelScope --- benchmarks/backend_request_func.py | 6 ++++++ vllm/lora/utils.py | 22 ++++++++++++++++------ 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/benchmarks/backend_request_func.py b/benchmarks/backend_request_func.py index c3fed56e8a95..36227ef251b1 100644 --- a/benchmarks/backend_request_func.py +++ b/benchmarks/backend_request_func.py @@ -399,6 +399,12 @@ async def async_request_openai_chat_completions( def get_model(pretrained_model_name_or_path: str) -> str: if os.getenv('VLLM_USE_MODELSCOPE', 'False').lower() == 'true': + if os.getenv('MODELSCOPE_ACCESS_TOKEN', ''): + from modelscope.hub.api import HubApi + access_token = os.getenv('MODELSCOPE_ACCESS_TOKEN') + api = HubApi() + api.login(access_token) + from modelscope import snapshot_download model_path = snapshot_download( diff --git a/vllm/lora/utils.py b/vllm/lora/utils.py index 5876494ce282..6f55eee0ac4b 100644 --- a/vllm/lora/utils.py +++ b/vllm/lora/utils.py @@ -178,15 +178,25 @@ def get_adapter_absolute_path(lora_path: str) -> str: if os.path.exists(lora_path): return os.path.abspath(lora_path) - # If the path does not exist locally, assume it's a Hugging Face repo. + # If the path does not exist locally, assume it's a Hugging Face repo or ModelScope repo. 
try: - local_snapshot_path = huggingface_hub.snapshot_download( - repo_id=lora_path) - except (HfHubHTTPError, RepositoryNotFoundError, EntryNotFoundError, - HFValidationError): + if os.getenv('VLLM_USE_MODELSCOPE', 'False').lower() == 'true': + if os.getenv('MODELSCOPE_ACCESS_TOKEN', ''): + from modelscope.hub.api import HubApi + access_token = os.getenv('MODELSCOPE_ACCESS_TOKEN') + api = HubApi() + api.login(access_token) + + from modelscope import snapshot_download + + local_snapshot_path = snapshot_download(model_id=lora_path) + else: + local_snapshot_path = huggingface_hub.snapshot_download( + repo_id=lora_path) + except Exception as e: # Handle errors that may occur during the download # Return original path instead instead of throwing error here - logger.exception("Error downloading the HuggingFace model") + logger.exception("Error downloading the HuggingFace or ModelScope model") return lora_path return local_snapshot_path From 60a86463757e052705aaf8dd8b8819c609f97746 Mon Sep 17 00:00:00 2001 From: AlphaINF <876066505@qq.com> Date: Fri, 29 Nov 2024 12:00:46 +0800 Subject: [PATCH 2/3] remove some unused import --- vllm/lora/utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/vllm/lora/utils.py b/vllm/lora/utils.py index 6f55eee0ac4b..c9e608e0a70e 100644 --- a/vllm/lora/utils.py +++ b/vllm/lora/utils.py @@ -3,8 +3,6 @@ from typing import List, Optional, Set, Tuple, Type, Union import huggingface_hub -from huggingface_hub.utils import (EntryNotFoundError, HfHubHTTPError, - HFValidationError, RepositoryNotFoundError) from torch import nn from transformers import PretrainedConfig From a12b5fa6b35bccbb1ffa6018ca3f50f91ef695cb Mon Sep 17 00:00:00 2001 From: AlphaINF <876066505@qq.com> Date: Fri, 29 Nov 2024 12:03:40 +0800 Subject: [PATCH 3/3] fix line too long --- vllm/lora/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/vllm/lora/utils.py b/vllm/lora/utils.py index c9e608e0a70e..3871b2c784bd 100644 --- 
a/vllm/lora/utils.py +++ b/vllm/lora/utils.py @@ -176,7 +176,8 @@ def get_adapter_absolute_path(lora_path: str) -> str: if os.path.exists(lora_path): return os.path.abspath(lora_path) - # If the path does not exist locally, assume it's a Hugging Face repo or ModelScope repo. + # If the path does not exist locally, + # assume it's a Hugging Face repo or ModelScope repo. try: if os.getenv('VLLM_USE_MODELSCOPE', 'False').lower() == 'true': if os.getenv('MODELSCOPE_ACCESS_TOKEN', ''): @@ -191,10 +192,11 @@ def get_adapter_absolute_path(lora_path: str) -> str: else: local_snapshot_path = huggingface_hub.snapshot_download( repo_id=lora_path) - except Exception as e: + except Exception: # Handle errors that may occur during the download # Return original path instead instead of throwing error here - logger.exception("Error downloading the HuggingFace or ModelScope model") + logger.exception( + "Error downloading the HuggingFace or ModelScope model") return lora_path return local_snapshot_path