Skip to content
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions vllm/lora/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ def from_local_checkpoint(
f" target modules in {expected_lora_modules}"
f" but received {unexpected_modules}."
f" Please verify that the loaded LoRA module is correct")
tensors = torch.load(lora_bin_file_path)
tensors = torch.load(lora_bin_file_path, map_location="device")
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Did you test this? I don't think `map_location="device"` works — `"device"` here is a string literal, not a device. `torch.load`'s `map_location` expects a `torch.device`, a valid device string like `"cpu"` or `"cuda:0"`, a dict, or a callable; passing the literal `"device"` will raise at load time. You probably meant `map_location=device` (the variable), and the same applies to the second call below.

else:
raise ValueError(f"{lora_dir} doesn't contain tensors")

Expand All @@ -257,7 +257,8 @@ def from_local_checkpoint(
embeddings = safetensors.torch.load_file(
new_embeddings_tensor_path)
elif os.path.isfile(new_embeddings_bin_file_path):
embeddings = torch.load(new_embeddings_bin_file_path)
embeddings = torch.load(new_embeddings_bin_file_path,
map_location="device")

rank = config["r"]
lora_alpha = config["lora_alpha"]
Expand Down