Browse Source

Merge pull request #5515 from Edenzzzz/fix_layout_convert

Fix layout converter caching
pull/5517/head
Edenzzzz 8 months ago committed by GitHub
parent
commit
9a3321e9f4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 5
      colossalai/tensor/d_tensor/layout_converter.py
  2. 11
      tests/test_tensor/test_dtensor/test_layout_converter.py

5
colossalai/tensor/d_tensor/layout_converter.py

@@ -440,7 +440,10 @@ class LayoutConverter(metaclass=SingletonMeta):
total_steps = 0
transform_path = []
comm_action_sequence: List[CommSpec] = []
spec_pairs = (str(source_spec.sharding_sequence), str(target_spec.sharding_sequence))
src_shape = source_layout.get_sharded_shape_per_device()
dst_shape = target_layout.get_sharded_shape_per_device()
spec_pairs = ((str(source_spec.sharding_sequence), src_shape), (str(target_spec.sharding_sequence), dst_shape))
if spec_pairs in self.cached_solution:
# Solution Cache hit

11
tests/test_tensor/test_dtensor/test_layout_converter.py

@@ -123,8 +123,15 @@ def check_layout_converting(rank, world_size, port):
assert comm_action_sequence[2].logical_process_axis == 1
# check the cached_spec_pairs_transform_path
assert layout_converter.cached_solution[("[R, S01, R]", "[S01, R, R]")][0] == transform_path
assert layout_converter.cached_solution[("[R, S01, R]", "[S01, R, R]")][1] == comm_action_sequence
src_shape = source_layout.get_sharded_shape_per_device()
dst_shape = target_layout.get_sharded_shape_per_device()
assert (
layout_converter.cached_solution[(("[R, S01, R]", src_shape), ("[S01, R, R]", dst_shape))][0] == transform_path
)
assert (
layout_converter.cached_solution[(("[R, S01, R]", src_shape), ("[S01, R, R]", dst_shape))][1]
== comm_action_sequence
)
comm_cost = layout_converter.get_total_comm_cost(source_layout, target_layout)

Loading…
Cancel
Save