We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
2 parents 0687656 + 9935039 — commit ee3faa9 (Copy full SHA for ee3faa9)
vllm/lora/models.py
@@ -216,6 +216,10 @@ def check_unexpected_modules(modules: dict):
216
for lora_module in modules.keys(): # noqa
217
module_name, _ = parse_fine_tuned_lora_name(lora_module, weights_mapper)
218
part_name = module_name.split(".")[-1]
219
+ # Handle FSDP file format where experts.base_layer is the
220
+ # gate_up_proj and experts is the down_proj
221
+ if "base_layer" in lora_module:
222
+ continue
223
if part_name not in expected_lora_modules:
224
unexpected_modules.append(module_name)
225
if unexpected_modules:
0 commit comments