20 changes: 20 additions & 0 deletions .github/workflows/pre-commit.yaml
@@ -0,0 +1,20 @@
name: pre-commit

on:
  push:
    branches: [master]
  pull_request:

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      - name: Setup Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8
          architecture: x64
      - name: Checkout Torchrec
        uses: actions/checkout@v2
      - name: Run pre-commit
        uses: pre-commit/action@v2.0.3
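(Note: this workflow sets up Python 3.8, checks out the repo, and runs the repository's pre-commit hooks on every push to master and on every pull request. The pre-commit/action step effectively runs "pre-commit run --all-files", so the same check can be reproduced locally with that command after installing pre-commit, e.g. via "pip install pre-commit".)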
16 changes: 16 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,16 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.0.1
    hooks:
      - id: check-toml
      - id: check-yaml
        exclude: packaging/.*
      - id: end-of-file-fixer

  - repo: https://github.com/omnilib/ufmt
    rev: v1.3.0
    hooks:
      - id: ufmt
        additional_dependencies:
          - black == 21.9b0
          - usort == 0.6.4
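(For context, the ufmt hook pins black and usort and applies both: usort groups and sorts imports, black normalizes code layout. A minimal, hypothetical before/after sketch of what the hook would do to an unformatted module; the module contents below are made up for illustration.)

# Before ufmt: unsorted imports and a one-line def (hypothetical example).
import torch
import heapq
from typing import List

def as_strings(items: List[object]) -> List[str]: return [str(x) for x in items]

# After ufmt: usort groups stdlib imports before third-party ones, and
# black moves the def body onto its own line with standard spacing.
import heapq
from typing import List

import torch


def as_strings(items: List[object]) -> List[str]:
    return [str(x) for x in items]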
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -0,0 +1,3 @@
[tool.usort]

first_party_detection = false
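(For context, usort's first_party_detection option, enabled by default, infers which top-level packages are first-party from the location of the file being sorted; setting it to false disables that inference, so imports of the repo's own package are categorized by the configured/known lists instead of by path. A hypothetical sketch of the resulting grouping, illustrative only:)

# With detection enabled, a file inside the repo could treat torchrec as
# first-party and give it its own trailing import group:
import heapq

import torch

import torchrec

# With first_party_detection = false, torchrec is grouped and sorted with
# the other third-party packages:
import heapq

import torch
import torchrec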
12 changes: 4 additions & 8 deletions torchrec/distributed/planner/embedding_planner.py
@@ -119,10 +119,7 @@ def plan(
             module=module,
             sharders=sharders,
         )
-        unplaced_param_infos: List[Tuple[ParamSortKey, ParameterInfo]] = [
-            (param_sort_key(param_info, self._world_size), param_info)
-            for param_info in param_infos
-        ]
+        unplaced_param_infos: List[Tuple[ParamSortKey, ParameterInfo]] = [(param_sort_key(param_info, self._world_size), param_info) for param_info in param_infos ]
         placed_param_infos: List[Tuple[ParamSortKey, ParameterInfo]] = []

         heapq.heapify(unplaced_param_infos)
@@ -175,14 +172,13 @@ def _log_stats(
             ]
             emb_dims = [param_info.param.shape[1]]
             if shard.sharding_type == ShardingType.ROW_WISE.value:
-                pooling_factor = [pooling_factor[0] / self._world_size] * len(ranks)
+                pooling_factor = [
+                    pooling_factor[0] / self._world_size] * len(ranks)
                 emb_dims = emb_dims * len(ranks)
             elif shard.sharding_type == ShardingType.TABLE_ROW_WISE.value:
                 # pyre-ignore [16]
                 host_id = shard.ranks[0] // self._local_size
-                ranks = list(
-                    range(host_id * self._local_size, (host_id + 1) * self._local_size)
-                )
+                ranks = list(range(host_id * self._local_size, (host_id + 1) * self._local_size))
                 pooling_factor = [pooling_factor[0] / self._local_size] * len(ranks)
                 emb_dims = emb_dims * len(ranks)
             elif shard.sharding_type == ShardingType.COLUMN_WISE.value:
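(For context on the planner code touched above: unplaced_param_infos pairs each ParameterInfo with a ParamSortKey and is turned into a min-heap by heapq.heapify, which lets the planner repeatedly pop the next parameter to place in sort-key order. Below is a minimal sketch of that heap-driven greedy pattern, with made-up names and a toy round-robin placement rule rather than torchrec's actual placement logic.)

import heapq
from typing import Dict, List, Tuple


def place_greedily(keyed_items: List[Tuple[int, str]], world_size: int) -> Dict[int, List[str]]:
    # Heapify so the item with the smallest sort key is popped first.
    unplaced = list(keyed_items)
    heapq.heapify(unplaced)

    placement: Dict[int, List[str]] = {rank: [] for rank in range(world_size)}
    rank = 0
    while unplaced:
        _, item = heapq.heappop(unplaced)
        placement[rank].append(item)  # toy rule: round-robin across ranks
        rank = (rank + 1) % world_size
    return placement


# Example: three "tables" keyed by a size-like sort key, placed on two ranks.
print(place_greedily([(30, "t_large"), (10, "t_small"), (20, "t_mid")], world_size=2))
# -> {0: ['t_small', 't_large'], 1: ['t_mid']}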