Drop ROCm load format check (vllm-project#10767)
Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
wangxiyuan authored Dec 4, 2024
1 parent d2bd88b commit b5b647b
Showing 1 changed file with 3 additions and 20 deletions.
vllm/config.py (3 additions, 20 deletions)

@@ -931,7 +931,9 @@ def __post_init__(self):
         if isinstance(model_loader_extra_config, str):
             self.model_loader_extra_config = json.loads(
                 model_loader_extra_config)
-        self._verify_load_format()
+        if isinstance(self.load_format, str):
+            load_format = self.load_format.lower()
+            self.load_format = LoadFormat(load_format)

         if self.ignore_patterns is not None and len(self.ignore_patterns) > 0:
             logger.info(
@@ -940,25 +942,6 @@ def __post_init__(self):
         else:
             self.ignore_patterns = ["original/**/*"]

-    def _verify_load_format(self) -> None:
-        if not isinstance(self.load_format, str):
-            return
-
-        load_format = self.load_format.lower()
-        self.load_format = LoadFormat(load_format)
-
-        rocm_not_supported_load_format: List[str] = []
-        if current_platform.is_rocm(
-        ) and load_format in rocm_not_supported_load_format:
-            rocm_supported_load_format = [
-                f for f in LoadFormat.__members__
-                if (f not in rocm_not_supported_load_format)
-            ]
-            raise ValueError(
-                f"load format '{load_format}' is not supported in ROCm. "
-                f"Supported load formats are "
-                f"{rocm_supported_load_format}")
-

 @dataclass
 class ParallelConfig:
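For context: the deleted ROCm branch could never trigger, because rocm_not_supported_load_format was an empty list, so the only real work _verify_load_format did was the string-to-enum conversion that the commit now inlines in __post_init__. Below is a minimal, self-contained sketch of that retained behavior. The LoadFormat members shown are an illustrative subset rather than the full enum from vllm/config.py, and normalize_load_format is a hypothetical helper used only for this example; the point is that constructing an Enum from an unknown value raises ValueError, so the conversion itself still rejects invalid load formats.

from enum import Enum

class LoadFormat(str, Enum):
    # Illustrative subset; the real enum in vllm/config.py defines more members.
    AUTO = "auto"
    PT = "pt"
    SAFETENSORS = "safetensors"

def normalize_load_format(load_format):
    # Mirrors the retained logic: lowercase the string, then let the Enum
    # constructor reject anything that is not a defined load format.
    if isinstance(load_format, str):
        return LoadFormat(load_format.lower())
    return load_format

print(normalize_load_format("SAFETENSORS"))  # LoadFormat.SAFETENSORS
try:
    normalize_load_format("not-a-format")
except ValueError:
    print("unknown load format rejected")  # raised by the LoadFormat constructor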
