[doc] update rst and docstring (#1351)

* update rst

* add zero docstr

* fix docstr

* remove fx.tracer.meta_patch

* fix docstr

* fix docstr

* update fx rst

* fix fx docstr

* remove useless rst
ver217 2022-07-21 15:54:53 +08:00 committed by GitHub
parent 274c1a3b5f
commit d068af81a3
132 changed files with 724 additions and 146 deletions

@@ -60,7 +60,7 @@ class GradScaler(object):
     * ``scaler.step(optimizer)`` safely unscales gradients and calls ``optimizer.step()``.
     * ``scaler.update()`` updates ``scaler``'s scale factor.
-    Example::
+    Example:
     # Creates a GradScaler once at the beginning of training.
     scaler = GradScaler()
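The loop the docstring sketches looks like this in full. A minimal sketch using `torch.cuda.amp.GradScaler`, whose API this class mirrors; the model, data, and hyperparameters are placeholders:

```python
import torch
from torch.cuda.amp import GradScaler, autocast

model = torch.nn.Linear(8, 8).cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
scaler = GradScaler()  # created once at the beginning of training

for _ in range(10):
    optimizer.zero_grad()
    with autocast():
        loss = model(torch.randn(4, 8, device='cuda')).sum()
    scaler.scale(loss).backward()  # backward runs on the scaled loss
    scaler.step(optimizer)         # safely unscales gradients, then calls optimizer.step()
    scaler.update()                # updates the scale factor
```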

@@ -102,10 +102,10 @@ def sync_states():
 def seed(parallel_mode: ParallelMode):
     """ A context for seed switch
-    Examples::
-        with seed(ParallelMode.DATA):
-            output = F.dropout(input)
+    Examples:
+        >>> with seed(ParallelMode.DATA):
+        >>>     output = F.dropout(input)
     Note:
         The parallel_mode should be concluded in ``ParallelMode``. More details about ``ParallelMode`` could be found
@@ -124,18 +124,18 @@ def with_seed(func, parallel_mode: ParallelMode):
     """
     A function wrapper which executes the function with a specified seed.
-    Examples::
-        # use with decorator
-        @with_seed(ParallelMode.DATA)
-        def forward(input):
-            return F.dropout(input)
-        out = forward(input)
-        # OR use it inline
-        def forward(input):
-            return F.dropout(input)
-        wrapper_forward = with_seed(forward, ParallelMode.DATA)
-        out = wrapped_forward(input)
+    Examples:
+        >>> # use with decorator
+        >>> @with_seed(ParallelMode.DATA)
+        >>> def forward(input):
+        >>>     return F.dropout(input)
+        >>> out = forward(input)
+        >>> # OR use it inline
+        >>> def forward(input):
+        >>>     return F.dropout(input)
+        >>> wrapped_forward = with_seed(forward, ParallelMode.DATA)
+        >>> out = wrapped_forward(input)
     Note:
         The parallel_mode should be concluded in ``ParallelMode``. More details about ``ParallelMode`` could be found
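Taken together, the new docstrings describe three equivalent ways to run an op under a parallel-mode-specific RNG state. A hedged sketch; the import path and the prior `colossalai.launch(...)` setup are assumptions, and `x` is a placeholder input:

```python
import torch
import torch.nn.functional as F
from colossalai.context import ParallelMode
from colossalai.context.random import seed, with_seed  # assumed import path

# assumes colossalai.launch(...) has already registered the parallel-mode seeds
x = torch.randn(4, 4, device='cuda')  # placeholder input

# 1. context manager: ops inside run under the DATA-mode RNG state
with seed(ParallelMode.DATA):
    out = F.dropout(x)

# 2. decorator form
@with_seed(ParallelMode.DATA)
def forward(inp):
    return F.dropout(inp)

out = forward(x)

# 3. inline wrapper form
def raw_forward(inp):
    return F.dropout(inp)

wrapped_forward = with_seed(raw_forward, ParallelMode.DATA)
out = wrapped_forward(x)
```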

@@ -12,7 +12,8 @@ class ColoProxy(Proxy):
     ColoProxy is a proxy class which uses meta tensor to handle data-dependent control flow. The original torch.fx proxy
     cannot be used to infer the condition statement, with this proxy, torch.fx can still run even with if statements.
-    Usage:
+    Example::
+
         proxy = tracer.create_proxy(...)
         proxy.meta_data = torch.empty(4, 2, device='meta')
         print(len(proxy)) # expect output 4
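The trick behind `meta_data` is that a meta tensor carries shape and dtype but allocates no storage, so data-independent queries still succeed. A self-contained illustration in plain PyTorch:

```python
import torch

t = torch.empty(4, 2, device='meta')  # metadata only, no storage allocated
print(t.shape)    # torch.Size([4, 2])
print(len(t))     # 4 -- the answer ColoProxy.__len__ can return without real data
print(t.numel())  # 8
```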

@@ -35,7 +35,8 @@ class ColoTracer(Tracer):
     ColoTracer is a symbolic tracer designed to support dynamic control flow by using meta tensors for the `colossalai.fx` module.
     This tracer is initialized in the same way as the original torch.fx.Tracer.
-    Usage:
+    Usage::
+
         class Model(nn.Module):
             def __init__(self):
                 super().__init__()
@@ -254,8 +255,8 @@ class ColoTracer(Tracer):
         non_meta_arg_names = sig_names - meta_arg_names
         for k, v in sig.parameters.items():
             if k in non_meta_arg_names and \
-                k not in concrete_args and \
-                v.default is not inspect.Parameter.empty:
+                    k not in concrete_args and \
+                    v.default is not inspect.Parameter.empty:
                 concrete_args[k] = v.default
         # get non concrete arg names
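A hedged usage sketch of the tracer on the kind of data-dependent branch plain `torch.fx` rejects; the import path and the `meta_args` keyword are assumptions, not shown in this diff:

```python
import torch
import torch.nn as nn
from colossalai.fx import ColoTracer  # assumed import path

class Model(nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(4, 4)

    def forward(self, x):
        # torch.fx.symbolic_trace fails on this branch because a Proxy's
        # truth value cannot be evaluated; a meta tensor's can.
        if x.size(0) > 2:
            return self.linear(x)
        return x

tracer = ColoTracer()
graph = tracer.trace(Model(), meta_args={'x': torch.rand(4, 4, device='meta')})
```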

@@ -13,6 +13,14 @@ class GeminiManager:
     PatrickStar: Parallel Training of Pre-trained Models via Chunk-based Memory Management
     https://arxiv.org/abs/2108.05818
+    Args:
+        placement_policy (str): Which device to place *held* tensors. It can be 'cpu', 'cuda' and 'auto'.
+            If it's 'cpu', parameters, gradients and optimizer states will be offloaded to CPU, which means min CUDA memory will be used.
+            If it's 'cuda', they won't be offloaded, which means max CUDA memory will be used.
+            If it's 'auto', they are moved dynamically based on CPU and CUDA memory usage. It will utilize heterogeneous memory space evenly and well.
+            Note that 'auto' policy can only work well when no other processes use CUDA during your training.
+        chunk_manager (ChunkManager): A ``ChunkManager`` instance.
     """
     def __init__(self, placement_policy: str, chunk_manager: ChunkManager) -> None:
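A construction sketch following the new ``Args`` section; the import path and the ``ChunkManager`` arguments are assumptions:

```python
from colossalai.gemini import ChunkManager, GeminiManager  # assumed import path

chunk_manager = ChunkManager(chunk_size=32 * 1024 * 1024)  # chunk size is a placeholder
# 'auto' moves tensors between CPU and CUDA based on memory usage; per the note
# above it works best when no other process uses CUDA during training
gemini_manager = GeminiManager(placement_policy='auto', chunk_manager=chunk_manager)
```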

@@ -40,7 +40,7 @@ def _cast_float(args, dtype: torch.dtype):
 class ColoDDP(torch.nn.Module):
     """Distributed data parallel for ColoTensor. Nested ColoDDP is not supported now.
-    Example::
+    Example:
         >>> from colossalai.core import global_context as gpc
         >>> from colossalai.context import ParallelMode
         >>> model = torch.nn.Linear(20, 1)
@@ -148,7 +148,7 @@ class ColoDDP(torch.nn.Module):
         """Sets parameters to be ignored by DDP.
         This method must be called before initializing ColoDDP.
-        Example::
+        Example:
            >>> params_to_ignore = []
            >>> for p in module.parameters():
            >>>     if should_ignore(p):
@@ -174,7 +174,7 @@ class ZeroDDP(ColoDDP):
     We can configure chunk and gemini via ChunkManager and GeminiManager respectively.
     For more details, see the API reference of ``ChunkManager`` and ``GeminiManager``.
-    Example::
+    Example:
         >>> model = torch.nn.Linear(20, 1)
         >>> placement_policy = 'cuda'
         >>> chunk_size = ChunkManager.search_chunk_size(model, search_range, n_grids) if use_chunk else None
@@ -283,7 +283,7 @@ class ZeroDDP(ColoDDP):
         dict:
             a dictionary containing a whole state of the module
-        Example::
+        Example:
            >>> module.state_dict().keys()
            ['bias', 'weight']

@@ -21,7 +21,7 @@ def colo_op_impl(func):
     by ``__torch_function__`` dispatch and has a ColoTensor as any of its
     parameters, the function provided will be invoked for that operator.
-    Example::
+    Example:
         >>> @colo_op_impl(torch.nn.functional.linear)
         >>> def my_custom_linear(types, args, kwargs, process_group):
         >>>     ....
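Fleshing out the docstring's example; the handler signature follows the docstring, the import path is an assumption, and the body is deliberately schematic:

```python
import torch.nn.functional as F
from colossalai.tensor.op_wrapper import colo_op_impl  # assumed import path

@colo_op_impl(F.linear)
def my_custom_linear(types, args, kwargs, process_group):
    # invoked through __torch_function__ whenever F.linear is called with a
    # ColoTensor argument; a real override would dispatch on the tensors'
    # distribution specs here
    ...
```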

@@ -41,7 +41,7 @@ class ParamOpHookManager:
     def use_hooks(*hooks: ParamOpHook):
         """Change the param op hooks you use. Nested calling is allowed.
-        Example::
+        Example:
             >>> with ParamOpHookManager.use_hooks(*hooks):
             >>>     do_something()
             >>> with ParamOpHookManager.use_hooks():
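In context, the hooks fire around parameter operations executed inside the ``with`` block; ``my_hook``, ``do_something``, and ``do_something_else`` below are hypothetical placeholders:

```python
from colossalai.tensor.param_op_hook import ParamOpHookManager  # assumed import path

hooks = [my_hook]  # my_hook: a hypothetical ParamOpHook implementation

with ParamOpHookManager.use_hooks(*hooks):
    do_something()  # parameter ops in here trigger my_hook

    with ParamOpHookManager.use_hooks():
        do_something_else()  # nested call: hooks disabled in this inner block
```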

@@ -26,7 +26,7 @@ def named_params_with_colotensor(
     (string, Union[Tensor, ColoTensor]): Tuple containing
         the name and parameter (or ColoTensor parameter)
-    Example::
+    Example:
         >>> model = torch.nn.Linear(*linear_size)
         >>> delattr(model.weight)

@@ -16,6 +16,30 @@ class OptimState(Enum):
 class ZeroOptimizer(ColossalaiOptimizer):
     """A wrapper for optimizer. ``ZeroDDP`` and ``ZeroOptimizer`` implement Zero Redundancy Optimizer (ZeRO state-3).
+    Note:
+        You must use ``ZeroDDP`` with ``ZeroOptimizer``.
+    Note:
+        Make sure you set ``placement_policy`` of ``GeminiManager`` to `"auto"`,
+        if you set ``gpu_margin_mem_ratio > 0``.
+    Args:
+        optim (Optimizer): An Optimizer instance.
+        module (ZeroDDP): A ``ZeroDDP`` instance.
+        gpu_margin_mem_ratio (float, optional): The ratio of GPU remaining memory (after the first forward-backward)
+            which will be used when using hybrid CPU optimizer.
+            This argument is meaningless when `placement_policy` of `GeminiManager` is not "auto".
+            Defaults to 0.0.
+        initial_scale (float, optional): Initial scale used by DynamicGradScaler. Defaults to 2**32.
+        min_scale (float, optional): Min scale used by DynamicGradScaler. Defaults to 1.
+        growth_factor (float, optional): growth_factor used by DynamicGradScaler. Defaults to 2.
+        backoff_factor (float, optional): backoff_factor used by DynamicGradScaler. Defaults to 0.5.
+        growth_interval (float, optional): growth_interval used by DynamicGradScaler. Defaults to 1000.
+        hysteresis (float, optional): hysteresis used by DynamicGradScaler. Defaults to 2.
+        max_scale (int, optional): max_scale used by DynamicGradScaler. Defaults to 2**32.
     """
     def __init__(self,
                  optim: Optimizer,
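Putting the pieces documented in this PR together; the import paths, the ``ChunkManager`` arguments, and the optimizer hyperparameters are assumptions:

```python
import torch
from colossalai.gemini import ChunkManager, GeminiManager  # assumed import paths
from colossalai.nn.parallel import ZeroDDP
from colossalai.zero import ZeroOptimizer  # assumed import path

model = torch.nn.Linear(20, 1)
chunk_manager = ChunkManager(chunk_size=32 * 1024 * 1024)
# 'auto' placement is required whenever gpu_margin_mem_ratio > 0 (see Note above)
gemini_manager = GeminiManager(placement_policy='auto', chunk_manager=chunk_manager)
model = ZeroDDP(model, gemini_manager)

optim = torch.optim.Adam(model.parameters(), lr=1e-3)
optim = ZeroOptimizer(optim, model, gpu_margin_mem_ratio=0.5, initial_scale=2**16)
```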

@@ -0,0 +1,8 @@
+colossalai.amp.naive\_amp.grad\_scaler
+======================================
+
+.. automodule:: colossalai.amp.naive_amp.grad_scaler
+   :members:

@@ -4,6 +4,11 @@ colossalai.amp.naive\_amp
 .. automodule:: colossalai.amp.naive_amp
    :members:

+.. toctree::
+   :maxdepth: 2
+
+   colossalai.amp.naive_amp.grad_scaler
+
 .. toctree::
    :maxdepth: 2

@@ -1,5 +0,0 @@
-colossalai.builder.pipeline
-===========================
-
-.. automodule:: colossalai.builder.pipeline
-   :members:

@@ -9,4 +9,3 @@ colossalai.builder
    :maxdepth: 2

    colossalai.builder.builder
-   colossalai.builder.pipeline

@@ -0,0 +1,5 @@
+colossalai.cli.benchmark.benchmark
+==================================
+
+.. automodule:: colossalai.cli.benchmark.benchmark
+   :members:

@@ -0,0 +1,5 @@
+colossalai.cli.benchmark.models
+===============================
+
+.. automodule:: colossalai.cli.benchmark.models
+   :members:

@@ -0,0 +1,13 @@
+colossalai.cli.benchmark
+========================
+
+.. automodule:: colossalai.cli.benchmark
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.cli.benchmark.benchmark
+   colossalai.cli.benchmark.models
+   colossalai.cli.benchmark.utils

@@ -0,0 +1,5 @@
+colossalai.cli.benchmark.utils
+==============================
+
+.. automodule:: colossalai.cli.benchmark.utils
+   :members:

@@ -0,0 +1,5 @@
+colossalai.cli.check.check\_installation
+========================================
+
+.. automodule:: colossalai.cli.check.check_installation
+   :members:

@@ -0,0 +1,11 @@
+colossalai.cli.check
+====================
+
+.. automodule:: colossalai.cli.check
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.cli.check.check_installation

@@ -0,0 +1,5 @@
+colossalai.cli.cli
+==================
+
+.. automodule:: colossalai.cli.cli
+   :members:

@@ -0,0 +1,5 @@
+colossalai.cli.launcher.hostinfo
+================================
+
+.. automodule:: colossalai.cli.launcher.hostinfo
+   :members:

@@ -0,0 +1,5 @@
+colossalai.cli.launcher.multinode\_runner
+=========================================
+
+.. automodule:: colossalai.cli.launcher.multinode_runner
+   :members:

@@ -0,0 +1,13 @@
+colossalai.cli.launcher
+=======================
+
+.. automodule:: colossalai.cli.launcher
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.cli.launcher.hostinfo
+   colossalai.cli.launcher.multinode_runner
+   colossalai.cli.launcher.run

@@ -0,0 +1,5 @@
+colossalai.cli.launcher.run
+===========================
+
+.. automodule:: colossalai.cli.launcher.run
+   :members:

@@ -0,0 +1,18 @@
+colossalai.cli
+==============
+
+.. automodule:: colossalai.cli
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.cli.benchmark
+   colossalai.cli.check
+   colossalai.cli.launcher
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.cli.cli

@@ -0,0 +1,5 @@
+colossalai.engine.gradient\_accumulation
+========================================
+
+.. automodule:: colossalai.engine.gradient_accumulation
+   :members:

@@ -3,3 +3,9 @@ colossalai.engine.gradient\_handler
 .. automodule:: colossalai.engine.gradient_handler
    :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.engine.gradient_handler.utils

@@ -0,0 +1,5 @@
+colossalai.engine.gradient\_handler.utils
+=========================================
+
+.. automodule:: colossalai.engine.gradient_handler.utils
+   :members:

@@ -1,5 +0,0 @@
-colossalai.gemini.ophooks.zero\_hook
-====================================
-
-.. automodule:: colossalai.gemini.ophooks.zero_hook
-   :members:

@@ -7,6 +7,6 @@ colossalai.engine
 .. toctree::
    :maxdepth: 2

+   colossalai.engine.gradient_accumulation
    colossalai.engine.gradient_handler
-   colossalai.gemini.ophooks
    colossalai.engine.schedule

@@ -0,0 +1,5 @@
+colossalai.fx.passes.adding\_split\_node\_pass
+==============================================
+
+.. automodule:: colossalai.fx.passes.adding_split_node_pass
+   :members:

@@ -0,0 +1,5 @@
+colossalai.fx.passes.meta\_info\_prop
+=====================================
+
+.. automodule:: colossalai.fx.passes.meta_info_prop
+   :members:

@@ -0,0 +1,15 @@
+colossalai.fx.passes
+====================
+
+.. automodule:: colossalai.fx.passes
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.fx.passes.adding_split_node_pass
+   colossalai.fx.passes.meta_info_prop
+   colossalai.fx.passes.shard_1d_pass
+   colossalai.fx.passes.split_module
+   colossalai.fx.passes.utils

@@ -0,0 +1,5 @@
+colossalai.fx.passes.shard\_1d\_pass
+====================================
+
+.. automodule:: colossalai.fx.passes.shard_1d_pass
+   :members:

@@ -0,0 +1,5 @@
+colossalai.fx.passes.split\_module
+==================================
+
+.. automodule:: colossalai.fx.passes.split_module
+   :members:

@@ -0,0 +1,5 @@
+colossalai.fx.passes.utils
+==========================
+
+.. automodule:: colossalai.fx.passes.utils
+   :members:

@@ -0,0 +1,5 @@
+colossalai.fx.proxy
+===================
+
+.. automodule:: colossalai.fx.proxy
+   :members:

@@ -0,0 +1,17 @@
+colossalai.fx
+=============
+
+.. automodule:: colossalai.fx
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.fx.passes
+   colossalai.fx.tracer
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.fx.proxy

@@ -0,0 +1,11 @@
+colossalai.fx.tracer
+====================
+
+.. automodule:: colossalai.fx.tracer
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.fx.tracer.tracer

@@ -0,0 +1,5 @@
+colossalai.fx.tracer.tracer
+===========================
+
+.. automodule:: colossalai.fx.tracer.tracer
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.chunk
+=======================
+
+.. automodule:: colossalai.gemini.chunk
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.chunk\_mgr
+============================
+
+.. automodule:: colossalai.gemini.chunk_mgr
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.gemini\_context
+=================================
+
+.. automodule:: colossalai.gemini.gemini_context
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.gemini\_mgr
+=============================
+
+.. automodule:: colossalai.gemini.gemini_mgr
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.memory\_tracer.memory\_monitor
+================================================
+
+.. automodule:: colossalai.gemini.memory_tracer.memory_monitor
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.memory\_tracer.memstats\_collector
+====================================================
+
+.. automodule:: colossalai.gemini.memory_tracer.memstats_collector
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.memory\_tracer.model\_data\_memtracer
+=======================================================
+
+.. automodule:: colossalai.gemini.memory_tracer.model_data_memtracer
+   :members:

@@ -1,5 +1,5 @@
-colossalai.utils.memory\_tracer
-===============================
+colossalai.gemini.memory\_tracer
+================================

 .. automodule:: colossalai.gemini.memory_tracer
    :members:
@@ -8,6 +8,6 @@ colossalai.utils.memory\_tracer
 .. toctree::
    :maxdepth: 2

-   colossalai.gemini.memory_tracer.async_memtracer
+   colossalai.gemini.memory_tracer.memory_monitor
    colossalai.gemini.memory_tracer.memstats_collector
    colossalai.gemini.memory_tracer.model_data_memtracer

@@ -8,4 +8,4 @@ colossalai.gemini.ophooks
 .. toctree::
    :maxdepth: 2

-   colossalai.gemini.ophooks.zero_hook
+   colossalai.gemini.ophooks.utils

@@ -0,0 +1,5 @@
+colossalai.gemini.ophooks.utils
+===============================
+
+.. automodule:: colossalai.gemini.ophooks.utils
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.paramhooks
+============================
+
+.. automodule:: colossalai.gemini.paramhooks
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.placement\_policy
+===================================
+
+.. automodule:: colossalai.gemini.placement_policy
+   :members:

@@ -0,0 +1,27 @@
+colossalai.gemini
+=================
+
+.. automodule:: colossalai.gemini
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.gemini.memory_tracer
+   colossalai.gemini.ophooks
+   colossalai.gemini.paramhooks
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.gemini.chunk
+   colossalai.gemini.chunk_mgr
+   colossalai.gemini.gemini_context
+   colossalai.gemini.gemini_mgr
+   colossalai.gemini.placement_policy
+   colossalai.gemini.stateful_tensor
+   colossalai.gemini.stateful_tensor_container
+   colossalai.gemini.stateful_tensor_mgr
+   colossalai.gemini.tensor_placement_policy
+   colossalai.gemini.tensor_utils

@@ -0,0 +1,5 @@
+colossalai.gemini.stateful\_tensor
+==================================
+
+.. automodule:: colossalai.gemini.stateful_tensor
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.stateful\_tensor\_container
+=============================================
+
+.. automodule:: colossalai.gemini.stateful_tensor_container
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.stateful\_tensor\_mgr
+=======================================
+
+.. automodule:: colossalai.gemini.stateful_tensor_mgr
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.tensor\_placement\_policy
+===========================================
+
+.. automodule:: colossalai.gemini.tensor_placement_policy
+   :members:

@@ -0,0 +1,5 @@
+colossalai.gemini.tensor\_utils
+===============================
+
+.. automodule:: colossalai.gemini.tensor_utils
+   :members:

@@ -0,0 +1,5 @@
+colossalai.nn.graph.graph\_node
+===============================
+
+.. automodule:: colossalai.nn.graph.graph_node
+   :members:

@@ -0,0 +1,12 @@
+colossalai.nn.graph
+===================
+
+.. automodule:: colossalai.nn.graph
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.nn.graph.graph_node
+   colossalai.nn.graph.utils

@@ -0,0 +1,5 @@
+colossalai.nn.graph.utils
+=========================
+
+.. automodule:: colossalai.nn.graph.utils
+   :members:

@@ -1,5 +0,0 @@
-colossalai.nn.layer.wrapper.lambda\_wrapper
-===========================================
-
-.. automodule:: colossalai.nn.layer.wrapper.lambda_wrapper
-   :members:

@@ -8,5 +8,4 @@ colossalai.nn.layer.wrapper
 .. toctree::
    :maxdepth: 2

-   colossalai.nn.layer.wrapper.lambda_wrapper
    colossalai.nn.layer.wrapper.pipeline_wrapper

@@ -1,5 +0,0 @@
-colossalai.nn.model.model\_from\_config
-=======================================
-
-.. automodule:: colossalai.nn.model.model_from_config
-   :members:

@@ -1,11 +0,0 @@
-colossalai.nn.model
-===================
-
-.. automodule:: colossalai.nn.model
-   :members:
-
-.. toctree::
-   :maxdepth: 2
-
-   colossalai.nn.model.model_from_config

@@ -16,3 +16,4 @@ colossalai.nn.optimizer
    colossalai.nn.optimizer.hybrid_adam
    colossalai.nn.optimizer.lamb
    colossalai.nn.optimizer.lars
+   colossalai.nn.optimizer.utils

@@ -0,0 +1,5 @@
+colossalai.nn.optimizer.utils
+=============================
+
+.. automodule:: colossalai.nn.optimizer.utils
+   :members:

@@ -0,0 +1,5 @@
+colossalai.nn.parallel.data\_parallel
+=====================================
+
+.. automodule:: colossalai.nn.parallel.data_parallel
+   :members:

@@ -0,0 +1,5 @@
+colossalai.nn.parallel.layers.colo\_module
+==========================================
+
+.. automodule:: colossalai.nn.parallel.layers.colo_module
+   :members:

@@ -0,0 +1,5 @@
+colossalai.nn.parallel.layers.embedding
+=======================================
+
+.. automodule:: colossalai.nn.parallel.layers.embedding
+   :members:

@@ -0,0 +1,5 @@
+colossalai.nn.parallel.layers.linear
+====================================
+
+.. automodule:: colossalai.nn.parallel.layers.linear
+   :members:

@@ -0,0 +1,5 @@
+colossalai.nn.parallel.layers.module\_utils
+===========================================
+
+.. automodule:: colossalai.nn.parallel.layers.module_utils
+   :members:

@@ -0,0 +1,14 @@
+colossalai.nn.parallel.layers
+=============================
+
+.. automodule:: colossalai.nn.parallel.layers
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.nn.parallel.layers.colo_module
+   colossalai.nn.parallel.layers.embedding
+   colossalai.nn.parallel.layers.linear
+   colossalai.nn.parallel.layers.module_utils

@@ -0,0 +1,5 @@
+colossalai.nn.parallel.reducer
+==============================
+
+.. automodule:: colossalai.nn.parallel.reducer
+   :members:

@@ -0,0 +1,17 @@
+colossalai.nn.parallel
+======================
+
+.. automodule:: colossalai.nn.parallel
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.nn.parallel.layers
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.nn.parallel.data_parallel
+   colossalai.nn.parallel.reducer

@@ -7,12 +7,13 @@ colossalai.nn
 .. toctree::
    :maxdepth: 2

+   colossalai.nn.graph
    colossalai.nn.layer
    colossalai.nn.loss
    colossalai.nn.lr_scheduler
    colossalai.nn.metric
-   colossalai.nn.model
    colossalai.nn.optimizer
+   colossalai.nn.parallel

 .. toctree::

@@ -0,0 +1,5 @@
+colossalai.pipeline.layer\_sepc
+===============================
+
+.. automodule:: colossalai.pipeline.layer_sepc
+   :members:

@@ -0,0 +1,5 @@
+colossalai.pipeline.pipelinable
+===============================
+
+.. automodule:: colossalai.pipeline.pipelinable
+   :members:

@@ -0,0 +1,13 @@
+colossalai.pipeline
+===================
+
+.. automodule:: colossalai.pipeline
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.pipeline.layer_sepc
+   colossalai.pipeline.pipelinable
+   colossalai.pipeline.utils

@@ -0,0 +1,5 @@
+colossalai.pipeline.utils
+=========================
+
+.. automodule:: colossalai.pipeline.utils
+   :members:

@@ -1,18 +1,26 @@
 colossalai
 ==========

 .. automodule:: colossalai
    :members:

 .. toctree::
    :maxdepth: 2

    colossalai.amp
    colossalai.builder
+   colossalai.cli
    colossalai.communication
    colossalai.context
    colossalai.engine
+   colossalai.fx
+   colossalai.gemini
    colossalai.kernel
    colossalai.logging
    colossalai.nn
+   colossalai.pipeline
    colossalai.registry
+   colossalai.tensor
    colossalai.testing
    colossalai.trainer
    colossalai.utils
@@ -22,4 +30,7 @@ colossalai
 .. toctree::
    :maxdepth: 2

+   colossalai.constants
+   colossalai.core
+   colossalai.global_variables
    colossalai.initialize

@@ -0,0 +1,5 @@
+colossalai.tensor.colo\_parameter
+=================================
+
+.. automodule:: colossalai.tensor.colo_parameter
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.colo\_tensor
+==============================
+
+.. automodule:: colossalai.tensor.colo_tensor
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.compute\_spec
+===============================
+
+.. automodule:: colossalai.tensor.compute_spec
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.const
+=======================
+
+.. automodule:: colossalai.tensor.const
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.dist\_spec\_mgr
+=================================
+
+.. automodule:: colossalai.tensor.dist_spec_mgr
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.distspec
+==========================
+
+.. automodule:: colossalai.tensor.distspec
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.op\_wrapper
+=============================
+
+.. automodule:: colossalai.tensor.op_wrapper
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.param\_op\_hook
+=================================
+
+.. automodule:: colossalai.tensor.param_op_hook
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.process\_group
+================================
+
+.. automodule:: colossalai.tensor.process_group
+   :members:

@@ -0,0 +1,21 @@
+colossalai.tensor
+=================
+
+.. automodule:: colossalai.tensor
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.tensor.colo_parameter
+   colossalai.tensor.colo_tensor
+   colossalai.tensor.compute_spec
+   colossalai.tensor.const
+   colossalai.tensor.dist_spec_mgr
+   colossalai.tensor.distspec
+   colossalai.tensor.op_wrapper
+   colossalai.tensor.param_op_hook
+   colossalai.tensor.process_group
+   colossalai.tensor.tensor_spec
+   colossalai.tensor.utils

@@ -0,0 +1,5 @@
+colossalai.tensor.tensor\_spec
+==============================
+
+.. automodule:: colossalai.tensor.tensor_spec
+   :members:

@@ -0,0 +1,5 @@
+colossalai.tensor.utils
+=======================
+
+.. automodule:: colossalai.tensor.utils
+   :members:

@@ -0,0 +1,5 @@
+colossalai.utils.checkpoint.module\_checkpoint
+==============================================
+
+.. automodule:: colossalai.utils.checkpoint.module_checkpoint
+   :members:

@@ -0,0 +1,12 @@
+colossalai.utils.checkpoint
+===========================
+
+.. automodule:: colossalai.utils.checkpoint
+   :members:
+
+.. toctree::
+   :maxdepth: 2
+
+   colossalai.utils.checkpoint.module_checkpoint
+   colossalai.utils.checkpoint.utils

@@ -0,0 +1,5 @@
+colossalai.utils.checkpoint.utils
+=================================
+
+.. automodule:: colossalai.utils.checkpoint.utils
+   :members:

@@ -1,5 +0,0 @@
-colossalai.utils.gradient\_accumulation
-=======================================
-
-.. automodule:: colossalai.utils.gradient_accumulation
-   :members:

@@ -0,0 +1,5 @@
+colossalai.utils.memory
+=======================
+
+.. automodule:: colossalai.utils.memory
+   :members:

@@ -1,5 +0,0 @@
-colossalai.utils.memory\_tracer.async\_memtracer
-================================================
-
-.. automodule:: colossalai.gemini.memory_tracer.async_memtracer
-   :members:

Some files were not shown because too many files have changed in this diff.