#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from colossalai.context import ParallelContext

# Module-level singleton holding the process's parallel configuration.
global_context = ParallelContext()


def set_global_context(context: ParallelContext):
    '''Reset the global context to be identical to a given :class:`ParallelContext`.

    :param context: Parallel context to install as the new global parallel context.
    :type context: ParallelContext
    '''
    global global_context
    global_context = context
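

# --- Usage sketch (illustrative; appended for clarity, not part of the original source) ---
# A minimal demonstration of swapping the global context. Because
# ``set_global_context`` rebinds the module-level name via ``global``, other code
# should read the context as a module attribute (e.g. ``colossalai.core.global_context``,
# module path assumed) rather than through a ``from ... import global_context`` alias,
# which would keep referring to the old object after the swap.
if __name__ == '__main__':
    custom_ctx = ParallelContext()        # fresh context (hypothetical example instance)
    set_global_context(custom_ctx)        # install it as the new global context
    assert global_context is custom_ctx   # the module-level name now points to our instance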