Closed
Description
Your current environment
The output of python collect_env.py
(Output of `python collect_env.py` was not provided by the reporter.)
🐛 Describe the bug
vllm serve /data/deepseek-ai/DeepSeek-R1 -tp 4 -pp 4 --data-parallel-size 4 --data-parallel-size-local 0 --data-parallel-address 10.254.20.30 --data-parallel-rpc-port 5555 --data-parallel-backend ray
Traceback (most recent call last):
File "/data/kebe/conda/envs/vllm-dev/bin/vllm", line 8, in <module>
sys.exit(main())
^^^^^^
File "/data/kebe/vllm/vllm/entrypoints/cli/main.py", line 59, in main
args.dispatch_function(args)
File "/data/kebe/vllm/vllm/entrypoints/cli/serve.py", line 58, in cmd
uvloop.run(run_server(args))
File "/data/kebe/conda/envs/vllm-dev/lib/python3.12/site-packages/uvloop/__init__.py", line 109, in run
return __asyncio.run(
^^^^^^^^^^^^^^
File "/data/kebe/conda/envs/vllm-dev/lib/python3.12/asyncio/runners.py", line 195, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/data/kebe/conda/envs/vllm-dev/lib/python3.12/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "uvloop/loop.pyx", line 1518, in uvloop.loop.Loop.run_until_complete
File "/data/kebe/conda/envs/vllm-dev/lib/python3.12/site-packages/uvloop/__init__.py", line 61, in wrapper
return await main
^^^^^^^^^^
File "/data/kebe/vllm/vllm/entrypoints/openai/api_server.py", line 1325, in run_server
await run_server_worker(listen_address, sock, args, **uvicorn_kwargs)
File "/data/kebe/vllm/vllm/entrypoints/openai/api_server.py", line 1345, in run_server_worker
async with build_async_engine_client(args, client_config) as engine_client:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/conda/envs/vllm-dev/lib/python3.12/contextlib.py", line 210, in __aenter__
return await anext(self.gen)
^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/vllm/vllm/entrypoints/openai/api_server.py", line 157, in build_async_engine_client
async with build_async_engine_client_from_engine_args(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/conda/envs/vllm-dev/lib/python3.12/contextlib.py", line 210, in __aenter__
return await anext(self.gen)
^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/vllm/vllm/entrypoints/openai/api_server.py", line 193, in build_async_engine_client_from_engine_args
async_llm = AsyncLLM.from_vllm_config(
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/vllm/vllm/v1/engine/async_llm.py", line 162, in from_vllm_config
return cls(
^^^^
File "/data/kebe/vllm/vllm/v1/engine/async_llm.py", line 124, in __init__
self.engine_core = EngineCoreClient.make_async_mp_client(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/vllm/vllm/v1/engine/core_client.py", line 89, in make_async_mp_client
return RayDPClient(vllm_config, executor_class, log_stats,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/vllm/vllm/v1/engine/core_client.py", line 1102, in __init__
super().__init__(vllm_config, executor_class, log_stats,
File "/data/kebe/vllm/vllm/v1/engine/core_client.py", line 919, in __init__
super().__init__(vllm_config, executor_class, log_stats,
File "/data/kebe/vllm/vllm/v1/engine/core_client.py", line 716, in __init__
super().__init__(
File "/data/kebe/vllm/vllm/v1/engine/core_client.py", line 422, in __init__
self._init_engines_direct(vllm_config, local_only,
File "/data/kebe/vllm/vllm/v1/engine/core_client.py", line 1128, in _init_engines_direct
self.resources.engine_manager = CoreEngineActorManager(
^^^^^^^^^^^^^^^^^^^^^^^
File "/data/kebe/vllm/vllm/v1/utils.py", line 341, in __init__
CoreEngineActorManager.create_dp_placement_groups(vllm_config)
File "/data/kebe/vllm/vllm/v1/utils.py", line 431, in create_dp_placement_groups
for i in range(available_engine_count):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: 'float' object cannot be interpreted as an integer
Before submitting a new issue...
- Make sure you have already searched for relevant issues and asked the chatbot at the bottom-right corner of the documentation page, which can answer many frequently asked questions.