Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions vllm/model_executor/models/llava.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from .siglip import (SiglipVisionModel, dummy_image_for_siglip,
dummy_seq_data_for_siglip, get_max_siglip_image_tokens,
input_processor_for_siglip)
from .utils import (filter_weights, flatten_bn, init_vllm_registered_model,
from .utils import (check_filter_available, filter_weights, flatten_bn, init_vllm_registered_model,
merge_multimodal_embeddings)


Expand Down Expand Up @@ -393,7 +393,8 @@ def sample(

def load_weights(self, weights: Iterable[Tuple[str, torch.Tensor]]):
# prepare weight iterators for components
vit_weights, mlp_weights, llm_weights = itertools.tee(weights, 3)
vit_weights, mlp_weights, llm_weights, tracker = itertools.tee(weights, 4)
check_filter_available(tracker, ["vision_tower", "multi_modal_projector", "language_model"])

# load vision encoder
vit_weights = filter_weights(vit_weights, "vision_tower")
Expand Down
14 changes: 14 additions & 0 deletions vllm/model_executor/models/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,20 @@ def filter_weights(weights: Iterable[Tuple[str, torch.Tensor]], prefix: str):
yield name, loaded_weight


def check_filter_available(weights: Iterable[Tuple[str, torch.Tensor]], prefix: List[str]):
    """
    Validate that every loaded weight name starts with an expected prefix.

    Args:
        weights: Iterable of ``(name, tensor)`` pairs as produced by a
            checkpoint loader; only the names are inspected.
        prefix: Allowed top-level name components (the part of a weight
            name before the first ``"."``).

    Raises:
        ValueError: If any weight name's leading component is not in
            ``prefix``; the message lists the offending names.
    """
    # Collect every name whose leading component is not an allowed prefix.
    # maxsplit=1 avoids splitting the full dotted name when only the first
    # component matters.
    unexpected_name = {
        name
        for name, _ in weights if name.split(".", 1)[0] not in prefix
    }

    if unexpected_name:
        raise ValueError(
            f"Loaded weights contain unexpected weights: {unexpected_name}")


def init_vllm_registered_model(
hf_config: PretrainedConfig,
cache_config: Optional[CacheConfig],
Expand Down