
[lazy] fix compatibility problem on torch 1.13 (#3911)

Branch: pull/3915/head
Author: Hongxin Liu, committed by GitHub
Commit: 9c88b6cbd1
Changed files (1): colossalai/lazy/lazy_init.py — 2 changed lines (+1, -1)
@@ -37,7 +37,7 @@ _EARLY_MATERIALIZED_OPS = ['__getitem__', 'split']
 # If your intent is to change the metadata of a Tensor (such as sizes / strides / storage / storage_offset)
 # without autograd tracking the change, remove the .data / .detach() call and wrap the change in a `with torch.no_grad():` block.
 # These ops cannot be unwrapped using .data
-_CHANGE_META_OPS = ['_cudnn_rnn_flatten_weight', 'requires_grad_', '__get__', '__set__']
+_CHANGE_META_OPS = ['_cudnn_rnn_flatten_weight', 'requires_grad_', '__get__', '__set__', 'numel', 'size', 'dim']
 _LEGACY_TENSOR_CONSTRUCTOR = {
     'FloatTensor': torch.float,
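
The change adds 'numel', 'size', and 'dim' to _CHANGE_META_OPS, the list of ops that must run directly on the wrapped meta tensor instead of being unwrapped via .data first. Below is a minimal sketch of that dispatch decision, assuming a __torch_function__-based wrapper in the spirit of colossalai/lazy/lazy_init.py; the LazySketch class and its behavior off the metadata path are hypothetical, not the repo's actual LazyTensor:

    import torch

    # Op list from the patched lazy_init.py: metadata-only ops that must not
    # be unwrapped via .data before dispatch.
    _CHANGE_META_OPS = ['_cudnn_rnn_flatten_weight', 'requires_grad_',
                        '__get__', '__set__', 'numel', 'size', 'dim']

    class LazySketch(torch.Tensor):
        # Hypothetical, heavily trimmed stand-in for the LazyTensor in
        # colossalai/lazy/lazy_init.py; it only shows the dispatch branch
        # this commit changes.
        @classmethod
        def __torch_function__(cls, func, types, args=(), kwargs=None):
            kwargs = kwargs or {}
            name = getattr(func, '__name__', '')
            if name in _CHANGE_META_OPS:
                # numel/size/dim now take this path: run the op directly on
                # the meta tensor, with no .data unwrapping.
                return super().__torch_function__(func, types, args, kwargs)
            # Any other op would materialize the lazy tensor in the real
            # implementation; this sketch just flags it.
            raise NotImplementedError(f'{name!r} would materialize the tensor')

    t = torch.empty(4, 8, device='meta').as_subclass(LazySketch)
    print(t.numel(), t.size(), t.dim())  # 32 torch.Size([4, 8]) 2

With the three ops on the metadata path, shape queries such as t.numel() succeed on a tensor that has no real storage yet; routing them through the .data unwrap path instead is presumably the torch 1.13 incompatibility this commit fixes.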
