# colossalai.communication — package initializer.
# Source project: https://github.com/hpcaitech/ColossalAI
# Re-export the public communication API: collective operations, point-to-point
# pipeline sends/receives, ring communication, and tensor-metadata helpers.
from .collective import all_gather, reduce_scatter, all_reduce, broadcast, reduce
from .p2p import (send_forward, send_forward_recv_forward, send_backward_recv_forward, send_backward,
                  send_backward_recv_backward, send_forward_recv_backward, send_forward_backward_recv_forward_backward,
                  recv_forward, recv_backward)
from .ring import ring_forward
from .utils import send_obj_meta, recv_obj_meta

# Explicit public API of this package (what `from ... import *` exposes).
__all__ = [
    'all_gather',
    'reduce_scatter',
    'all_reduce',
    'broadcast',
    'reduce',
    'send_forward',
    'send_forward_recv_forward',
    'send_forward_backward_recv_forward_backward',
    'send_backward',
    'send_backward_recv_backward',
    'send_backward_recv_forward',
    'send_forward_recv_backward',
    'recv_backward',
    'recv_forward',
    'ring_forward',
    'send_obj_meta',
    'recv_obj_meta',
]