Mirror of https://github.com/deepseek-ai/DeepEP (synced 2025-06-26 18:28:11 +00:00)
Fix typo

This commit is contained in: parent 2a3cac903a, commit 55cdd9a64f
@@ -204,7 +204,7 @@ def combine_backward(grad_combined_x: Union[torch.Tensor, Tuple[torch.Tensor, to
     global _buffer
 
     # The backward process of MoE combine is actually a dispatch
-    # For more advanced usages, please refer to the docs of the `combine` function
+    # For more advanced usages, please refer to the docs of the `dispatch` function
     grad_x, _, _, _, _, event = _buffer.dispatch(grad_combined_x, handle=handle, async_finish=True,
                                                  previous_event=previous_event,
                                                  allocate_on_comm_stream=previous_event is not None)
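
The hunk only touches a comment, but it is worth spelling out what the surrounding code does: the backward pass of MoE combine is implemented as a dispatch of the incoming gradient, reusing the handle saved from the forward pass. Below is a minimal, hypothetical sketch of how such a helper pair could be hooked into autograd. The wrapper names `combine_forward` and `combine_backward` refer to the usage-example helpers this hunk lives in, but the exact signatures and return values used here are assumptions, not part of this commit.

import torch


class MoECombine(torch.autograd.Function):
    """Hypothetical autograd wrapper; only the backward-as-dispatch idea
    is taken from the patched code above."""

    @staticmethod
    def forward(ctx, x, handle):
        # Forward: reduce the expert outputs back to the ranks/tokens that
        # issued them (assumed `combine_forward` helper).
        combined_x, _event = combine_forward(x, handle)
        ctx.handle = handle
        return combined_x

    @staticmethod
    def backward(ctx, grad_combined_x):
        # Backward: scatter the upstream gradient back to the expert layout,
        # i.e. run a dispatch with the saved handle, exactly as the patched
        # comment describes. The returned event is meant for communication
        # overlap; a real implementation would wait on it before using grad_x.
        grad_x, _event = combine_backward(grad_combined_x, ctx.handle)
        return grad_x, None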