
Commit 835c8e3

[MISC] Remove useless patch
Signed-off-by: wangxiyuan <wangxiyuan1007@gmail.com>
1 parent: e112317

File tree: 2 files changed (+0, −29 lines)

vllm_ascend/patch/__init__.py

Lines changed: 0 additions & 10 deletions

@@ -56,16 +56,6 @@
 #    Need a PR to vllm to support get port from environment.
 #    Future Plan:
 #    Remove those patch when vllm merged them
-# 3. `vllm.config.ParallelConfig.ParallelConfig.stateless_init_dp_group`
-#    Why:
-#    vLLM use gloo backend by default to initialize stateless dp process gourp, but we want to use hccl here to
-#    get better performance
-#    How:
-#    adopt nccl backend to init process group.(Now we still use gloo, it's just a placeholder, we'll use nccl in the future)
-#    Related PR (if no, explain why):
-#    Need a PR to vllm to support more backend.
-#    Future Plan:
-#    Remove those patch when vllm support more backend.
 #
 # * Worker Patch:
 # ===============

vllm_ascend/patch/platform/patch_common/patch_distributed.py

Lines changed: 0 additions & 19 deletions

@@ -23,8 +23,6 @@
 import vllm.envs as envs
 from torch.distributed import ProcessGroup
 from vllm.config import ParallelConfig
-from vllm.distributed.utils import \
-    stateless_init_torch_distributed_process_group
 
 from vllm_ascend.utils import NullHandle, is_310p
 
@@ -65,25 +63,8 @@ def parallel_config_get_dp_port(self) -> int:
     return port
 
 
-def stateless_init_dp_group(self) -> "ProcessGroup":
-    # TODO(Yizhou): Currently we have to set the backend to gloo
-    # because in vllm.config.ParallelConfig.has_unfinished_dp the
-    # device is set to cpu. We need to fix this in the future.
-    # We need to compare the performance of gloo and hccl and then
-    # decide which one to use.
-    dp_group = stateless_init_torch_distributed_process_group(
-        self.data_parallel_master_ip,
-        self.get_next_dp_init_port(),
-        self.data_parallel_rank,
-        self.data_parallel_size,
-        backend="gloo")
-
-    return dp_group
-
-
 vllm.distributed.parallel_state.destroy_model_parallel = ascend_destroy_model_parallel
 ParallelConfig.get_next_dp_init_port = parallel_config_get_dp_port
-ParallelConfig.stateless_init_dp_group = stateless_init_dp_group
 
 
 def communication_adaptation_310p():
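For reference, below is a minimal sketch of the monkey-patch pattern the removed lines implemented, rewritten with the hccl backend the deleted TODO comments were aiming for. The name stateless_init_dp_group_hccl and the backend="hccl" value are assumptions drawn from those comments, not code that exists in the repository; the signature of stateless_init_torch_distributed_process_group is the one visible in the diff above.

from torch.distributed import ProcessGroup
from vllm.config import ParallelConfig
from vllm.distributed.utils import \
    stateless_init_torch_distributed_process_group


def stateless_init_dp_group_hccl(self: ParallelConfig) -> "ProcessGroup":
    # Hypothetical variant: same arguments the removed patch passed,
    # only the backend differs. The removed code kept "gloo" as a
    # placeholder because ParallelConfig.has_unfinished_dp pins the
    # device to CPU.
    return stateless_init_torch_distributed_process_group(
        self.data_parallel_master_ip,
        self.get_next_dp_init_port(),
        self.data_parallel_rank,
        self.data_parallel_size,
        backend="hccl")


# Applying it would mirror the removed assignment:
# ParallelConfig.stateless_init_dp_group = stateless_init_dp_group_hccl

Since the removed patch still passed backend="gloo", which is vLLM's default, it changed no behavior, which is presumably why the commit message calls it useless.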
