Commit 4032949

[Bugfix] Fix DeepEP config for DP4TP4 (#23619)
Signed-off-by: Ming Yang <[email protected]>
1 parent 08abfa7 commit 4032949

1 file changed: 4 additions, 4 deletions

vllm/model_executor/layers/fused_moe/deepep_ht_prepare_finalize.py

Lines changed: 4 additions & 4 deletions
@@ -49,14 +49,14 @@ def topk_indices_dtype(self) -> Optional[torch.dtype]:
         return torch.int64
 
     def _get_dispatch_config(self) -> Optional[deep_ep.Config]:
-        if self.dp_size not in self.available_rank_configs:
+        if self.num_dispatchers_ not in self.available_rank_configs:
             return None
-        return deep_ep.Buffer.get_dispatch_config(self.dp_size)
+        return deep_ep.Buffer.get_dispatch_config(self.num_dispatchers_)
 
     def _get_combine_config(self) -> Optional[deep_ep.Config]:
-        if self.dp_size not in self.available_rank_configs:
+        if self.num_dispatchers_ not in self.available_rank_configs:
             return None
-        return deep_ep.Buffer.get_combine_config(self.dp_size)
+        return deep_ep.Buffer.get_combine_config(self.num_dispatchers_)
 
     def _do_dispatch(
         self,
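
Context for the change above: DeepEP's tuned dispatch/combine configs are looked up by the number of ranks that take part in the token all-to-all, and the old code keyed that lookup on self.dp_size, which diverges from the actual rank count once tensor parallelism is combined with data parallelism (e.g. DP4TP4). Below is a minimal, hypothetical sketch of that distinction; the assumption that the dispatcher count equals dp_size * tp_size, and the names pick_config_key and AVAILABLE_RANK_CONFIGS, are illustrative and not taken from the diff.

# Hypothetical sketch, not vLLM code: why keying the DeepEP config lookup
# on dp_size alone can break for DP4TP4. Assumes the dispatcher count is
# the total number of ranks in the all-to-all (dp_size * tp_size); the
# real value comes from self.num_dispatchers_ in the patched file.

AVAILABLE_RANK_CONFIGS = {2, 4, 8, 16, 24, 32, 64, 128}  # illustrative set

def pick_config_key(dp_size: int, tp_size: int) -> int:
    # Number of ranks participating in dispatch/combine in this assumed setup.
    num_dispatchers = dp_size * tp_size
    if num_dispatchers not in AVAILABLE_RANK_CONFIGS:
        raise ValueError(f"no tuned DeepEP config for {num_dispatchers} ranks")
    return num_dispatchers

# DP4TP4: keying on dp_size would request a 4-rank config, while in this
# assumed layout 16 ranks actually share the DeepEP buffer.
print(pick_config_key(dp_size=4, tp_size=4))  # -> 16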
