mirror of https://github.com/hpcaitech/ColossalAI
[NFC] polish code format
commit 0dcc410f57
@@ -1,6 +1,7 @@
-import torch
 import operator
+
+import torch
 
 __all__ = [
     'ELEMENTWISE_MODULE_OP', 'ELEMENTWISE_FUNC_OP', 'RESHAPE_FUNC_OP', 'CONV_MODULE_OP', 'CONV_FUNC_OP',
     'LINEAR_MODULE_OP', 'LINEAR_FUNC_OP', 'BATCHNORM_MODULE_OP', 'POOL_MODULE_OP', 'NON_PARAM_FUNC_OP', 'BCAST_FUNC_OP',
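The reordering in this hunk matches isort-style import grouping: standard-library imports first, third-party imports second, with a blank line between groups. A minimal sketch of the same transformation via isort's Python API (the tool choice is an assumption; the commit only shows the resulting format):

import isort

# Unsorted imports, as in the old version of the file above.
messy = "import torch\nimport operator\n"

# isort places stdlib ("operator") before third-party ("torch"),
# separating the two groups with a blank line.
print(isort.code(messy), end="")
# import operator
#
# import torch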
@@ -1,9 +1,11 @@
+from collections import OrderedDict as ODict
 from dataclasses import dataclass
-from torch.fx.node import Node
+from typing import Any, List, OrderedDict, Union
+
 from torch.fx.graph import Graph
 from torch.fx.graph_module import GraphModule
-from collections import OrderedDict as ODict
-from typing import List, OrderedDict, Union, Any
+from torch.fx.node import Node
+
 from colossalai.fx.passes.utils import get_node_module
 
 __all__ = ['LiveVariable', 'LiveVariableVector', 'LiveStage', 'GraphAnalyser']
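Besides regrouping, this hunk alphabetizes the names inside the typing import. isort applies the same within-statement sort (again an assumption about the tooling; the diff only shows the result):

import isort

# Names inside a from-import are sorted alphabetically.
print(isort.code("from typing import List, OrderedDict, Union, Any\n"), end="")
# from typing import Any, List, OrderedDict, Union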
@@ -2,9 +2,9 @@ import operator
 from functools import reduce
 
 import torch
-from colossalai.auto_parallel.tensor_shard.deprecated._utils import \
-    ignore_sharding_exception
-from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import (ShardingStrategy, StrategiesVector)
+
+from colossalai.auto_parallel.tensor_shard.deprecated._utils import ignore_sharding_exception
+from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import ShardingStrategy, StrategiesVector
 
 from .operator_handler import OperatorHandler
 
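This hunk, and the two below it, replaces backslash-continued and parenthesized imports with single-line ones. The three spellings are equivalent at runtime; a small sketch with stdlib modules (a hypothetical example, not code from the commit):

# Backslash continuation, as in the old code (works, but brittle to edit).
from os.path import \
    join

# Parenthesized continuation, the PEP 8-preferred way to wrap long imports.
from os.path import (dirname,
                     basename)

# Single line, the form this commit standardizes on.
from os.path import basename, dirname, join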
@@ -5,9 +5,9 @@ from functools import reduce
 from typing import Dict, List
 
 import torch
-from colossalai.auto_parallel.tensor_shard.deprecated._utils import \
-    ignore_sharding_exception
-from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import (ShardingStrategy, StrategiesVector)
+
+from colossalai.auto_parallel.tensor_shard.deprecated._utils import ignore_sharding_exception
+from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import ShardingStrategy, StrategiesVector
 from colossalai.tensor.shape_consistency import ShapeConsistencyManager
 from colossalai.tensor.sharding_spec import ShardingSpec
 
@@ -6,11 +6,10 @@ from functools import reduce
 from typing import Dict, List
 
 import torch
-from colossalai.auto_parallel.tensor_shard.deprecated._utils import \
-    ignore_sharding_exception
-from colossalai.auto_parallel.tensor_shard.deprecated.constants import \
-    INFINITY_COST
-from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import (ShardingStrategy, StrategiesVector)
+
+from colossalai.auto_parallel.tensor_shard.deprecated._utils import ignore_sharding_exception
+from colossalai.auto_parallel.tensor_shard.deprecated.constants import INFINITY_COST
+from colossalai.auto_parallel.tensor_shard.deprecated.sharding_strategy import ShardingStrategy, StrategiesVector
 from colossalai.tensor.shape_consistency import ShapeConsistencyManager
 from colossalai.tensor.sharding_spec import ShardingSpec
 
@@ -4,10 +4,11 @@ from functools import reduce
 from typing import Dict, List, Optional, Union
 
 import torch
+from torch.fx.node import Node
+
 from colossalai.device.device_mesh import DeviceMesh
 from colossalai.tensor.shape_consistency import ShapeConsistencyManager
 from colossalai.tensor.sharding_spec import ShardingSpec
-from torch.fx.node import Node
 
 from ..constants import INFINITY_COST
 
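Here `from torch.fx.node import Node` moves up next to `import torch`: both belong to the third-party group, while the colossalai imports form a separate first-party group. Under the assumption that isort produced this layout, the first-party package would be declared roughly like this:

import isort

src = (
    "import torch\n"
    "from colossalai.device.device_mesh import DeviceMesh\n"
    "from torch.fx.node import Node\n"
)

# Declaring "colossalai" as first-party keeps torch and torch.fx together
# and pushes the colossalai import into its own group below them.
print(isort.code(src, known_first_party=["colossalai"]), end="")
# import torch
# from torch.fx.node import Node
#
# from colossalai.device.device_mesh import DeviceMesh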