[DTensor] Remove compute_local_offset from _utils.py (#109096)
Separating internal changes from OSS changes. This PR removes compute_local_offset from the OSS directory only.

This replaces #108965
Pull Request resolved: #109096
Approved by: https://github.com/wanchaol, https://github.com/fduwjj
wz337 authored and pytorchmergebot committed Sep 12, 2023
1 parent cf26e55 commit 6dc56d3
Showing 1 changed file with 0 additions and 36 deletions.
torch/distributed/_tensor/_utils.py (36 changes: 0 additions, 36 deletions)
@@ -43,42 +43,6 @@ def compute_local_shape(
     return tuple(local_shape)
 
 
-# TODO: audit existing code base to see if we can safely remove this API.
-def compute_local_offset(
-    global_shape: ShapeType, mesh: DeviceMesh, placements: Sequence[Placement]
-) -> Tuple[int, ...]:
-    """
-    Compute the offsets of a local shard of the given DTensor on its current
-    global rank. This is mostly used by distributed checkpointing to know the
-    exact offsets of the local shard.
-    """
-    my_coordinate = mesh.get_coordinate()
-
-    if my_coordinate is None:
-        # if rank not in the mesh, return empty offset
-        return ()
-    else:
-        local_offsets = [0] * len(global_shape)
-        local_shape = list(global_shape)
-
-        for idx, placement in enumerate(placements):
-            mesh_dim_size = mesh.size(idx)
-            if isinstance(placement, Shard):
-                shard_dim = placement.dim
-                assert shard_dim < len(
-                    local_shape
-                ), f"Sharding dim {shard_dim} greater than tensor ndim {len(local_shape)}"
-                shard_size, shard_offset = placement._local_shard_size_on_dim(
-                    local_shape[shard_dim],
-                    mesh_dim_size,
-                    my_coordinate[idx],
-                    return_offset=True,
-                )
-                local_shape[shard_dim] = shard_size
-                local_offsets[shard_dim] = shard_offset
-        return tuple(local_offsets)
-
-
 def compute_local_shape_and_global_offset(
     global_shape: ShapeType, mesh: DeviceMesh, placements: Sequence[Placement]
 ) -> Tuple[Tuple[int, ...], Tuple[int, ...]]:
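For reference, the information the removed function returned is still available through compute_local_shape_and_global_offset, which remains in _utils.py and whose signature is visible in the diff context above. Below is a minimal sketch of a caller migrating off compute_local_offset; the 2x2 DeviceMesh over four ranks, the (8, 8) global shape, and the placements are illustrative assumptions, not part of this commit, and the snippet assumes an initialized process group (e.g. launched via torchrun --nproc_per_node=4).

import torch
from torch.distributed._tensor import DeviceMesh, Shard
from torch.distributed._tensor._utils import compute_local_shape_and_global_offset

# Hypothetical setup: a 2x2 mesh over 4 ranks.
mesh = DeviceMesh("cpu", torch.arange(4).reshape(2, 2))
# Shard tensor dim 0 across mesh dim 0 and tensor dim 1 across mesh dim 1.
placements = [Shard(0), Shard(1)]
global_shape = (8, 8)

# One call returns both pieces of shard metadata.
local_shape, global_offset = compute_local_shape_and_global_offset(
    global_shape, mesh, placements
)
# local_shape: this rank's shard shape, e.g. (4, 4) on every rank here.
# global_offset: the shard's starting indices in the global tensor, which is
# the information compute_local_offset used to provide on its own.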
