#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import inspect
import sys
from importlib.machinery import SourceFileLoader
from pathlib import Path
from typing import Union

from colossalai.logging import get_dist_logger


class Config(dict):
    """A wrapper class for dict objects, whose values can be accessed as attributes.

    Args:
        config (dict): The dict object to be wrapped.
    """

    def __init__(self, config: dict = None):
        if config is not None:
            for k, v in config.items():
                self._add_item(k, v)

    def __missing__(self, key):
        raise KeyError(key)

    def __getattr__(self, key):
        try:
            value = super(Config, self).__getitem__(key)
            return value
        except KeyError:
            # raise AttributeError instead of KeyError so that hasattr() and
            # getattr() with a default behave as expected for missing keys
            raise AttributeError(key)

    def __setattr__(self, key, value):
        super(Config, self).__setitem__(key, value)

    def _add_item(self, key, value):
        # wrap nested dicts so that they support attribute access as well
        if isinstance(value, dict):
            self.__setattr__(key, Config(value))
        else:
            self.__setattr__(key, value)

    def update(self, config):
        assert isinstance(config, (Config, dict)), 'can only update dictionary or Config objects.'
        for k, v in config.items():
            self._add_item(k, v)
        return self
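
    # Merging example (hypothetical values): `update` adds or replaces top-level
    # entries; a nested dict value replaces the previous entry wholesale rather
    # than deep-merging into it.
    #
    #   cfg = Config({'optimizer': {'lr': 1e-3}})
    #   cfg.update({'optimizer': {'weight_decay': 0.0}})
    #   cfg.optimizer   # -> {'weight_decay': 0.0}; the old 'lr' entry is gone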

    @staticmethod
    def from_file(filename: Union[str, Path]):
        """Reads a python file and constructs a corresponding :class:`Config` object.

        Args:
            filename (Union[str, Path]): Path of the file used to construct the :class:`Config` object.

        Returns:
            :class:`Config`: A :class:`Config` object constructed with information in the file.

        Raises:
            AssertionError: Raised if the file does not exist or is not a .py file.
            TypeError: Raised if ``filename`` is neither a str nor a Path.
        """

        # check config path
        if isinstance(filename, str):
            filepath = Path(filename).absolute()
        elif isinstance(filename, Path):
            filepath = filename.absolute()
        else:
            raise TypeError(f'filename must be a str or a Path object, but got {type(filename)}')

        assert filepath.exists(), f'{filename} is not found, please check your configuration path'

        # check extension
        extension = filepath.suffix
        assert extension == '.py', 'only .py files are supported'

        # import the config as module
        remove_path = False
        if str(filepath.parent) not in sys.path:
            # make the config file's directory importable so the module can be loaded
            sys.path.insert(0, str(filepath.parent))
            remove_path = True

        module_name = filepath.stem
        source_file = SourceFileLoader(fullname=str(module_name), path=str(filepath))
        module = source_file.load_module()

        # load into config
        config = Config()

        for k, v in module.__dict__.items():
            if k.startswith('__') or inspect.ismodule(v) or inspect.isclass(v):
                continue
            else:
                config._add_item(k, v)

        logger = get_dist_logger()
        logger.debug('variables which start with __, or are modules or classes, are omitted from the config file')

        # remove the module from the cache and restore sys.path
        del sys.modules[module_name]
        if remove_path:
            sys.path.pop(0)

        return config
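
# A minimal usage sketch for `Config.from_file` (the file name and its contents
# below are hypothetical, for illustration only):
#
#   # ./my_config.py
#   BATCH_SIZE = 128
#   model = dict(type='vit', depth=12)
#
#   cfg = Config.from_file('./my_config.py')
#   cfg.BATCH_SIZE    # -> 128
#   cfg.model.type    # -> 'vit'; nested dicts are wrapped as Config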


class ConfigException(Exception):
    """Exception raised for configuration-related errors."""