mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
35 lines
868 B
"""Public API of the communication subpackage.

Re-exports the collective primitives, the point-to-point (p2p) pipeline
send/recv helpers, ring communication, and object-metadata utilities so
callers can import everything from this package directly. ``__all__``
declares the exported names explicitly.
"""

from .collective import all_gather, all_reduce, broadcast, reduce, reduce_scatter
from .p2p import (
    recv_backward,
    recv_forward,
    send_backward,
    send_backward_recv_backward,
    send_backward_recv_forward,
    send_forward,
    send_forward_backward_recv_forward_backward,
    send_forward_recv_backward,
    send_forward_recv_forward,
)
from .ring import ring_forward
from .utils import recv_obj_meta, send_obj_meta

__all__ = [
    "all_gather",
    "reduce_scatter",
    "all_reduce",
    "broadcast",
    "reduce",
    "send_forward",
    "send_forward_recv_forward",
    "send_forward_backward_recv_forward_backward",
    "send_backward",
    "send_backward_recv_backward",
    "send_backward_recv_forward",
    "send_forward_recv_backward",
    "recv_backward",
    "recv_forward",
    "ring_forward",
    "send_obj_meta",
    "recv_obj_meta",
]