Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions .actions/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,6 +431,42 @@ def copy_replace_imports(
source_dir, source_imports, target_imports, target_dir=target_dir, lightning_by=lightning_by
)

@staticmethod
def pull_docs_files(
    gh_user_repo: str,
    target_dir: str = "docs/source-pytorch/XXX",
    checkout: str = "tags/1.0.0",
    source_dir: str = "docs/source",
) -> None:
    """Pull docs pages from an external GitHub repo and append them to the local docs tree.

    Args:
        gh_user_repo: GitHub ``user/repo`` slug to download the archive from.
        target_dir: local folder (relative to the project root) the pages are copied into.
        checkout: git ref under ``refs/`` used to pick the archive, e.g. ``tags/1.0.0`` or ``heads/master``.
        source_dir: folder inside the remote repo that holds the ``.rst`` pages.

    Raises:
        RuntimeError: if the downloaded archive does not extract to exactly one top-level folder.
    """
    import zipfile

    zip_url = f"https://github.com/{gh_user_repo}/archive/refs/{checkout}.zip"

    with tempfile.TemporaryDirectory() as tmp:
        zip_file = os.path.join(tmp, "repo.zip")
        urllib.request.urlretrieve(zip_url, zip_file)

        with zipfile.ZipFile(zip_file, "r") as zip_ref:
            zip_ref.extractall(tmp)

        zip_dirs = [d for d in glob.glob(os.path.join(tmp, "*")) if os.path.isdir(d)]
        # A GitHub archive extracts to a single `<repo>-<ref>` folder; anything else is unexpected.
        # Explicit check instead of `assert` so it also fires when running under `python -O`.
        if len(zip_dirs) != 1:
            raise RuntimeError(f"Expected a single extracted folder in the archive but found: {zip_dirs}")
        repo_dir = zip_dirs[0]

        # With recursive=True, `**` also matches zero directories, so a single glob covers
        # top-level pages as well as pages at any nesting depth (the previous non-recursive
        # pair of globs only reached one directory level deep).
        ls_pages = glob.glob(os.path.join(repo_dir, source_dir, "**", "*.rst"), recursive=True)
        for rst in ls_pages:
            # Path of the page relative to the remote source dir, mirrored under target_dir.
            rel_rst = os.path.relpath(rst, os.path.join(repo_dir, source_dir))
            rel_dir = os.path.dirname(rel_rst)
            os.makedirs(os.path.join(_PROJECT_ROOT, target_dir, rel_dir), exist_ok=True)
            new_rst = os.path.join(_PROJECT_ROOT, target_dir, rel_rst)
            # Never overwrite a page that already exists in the local tree.
            if os.path.isfile(new_rst):
                logging.warning(f"Page {new_rst} already exists in the local tree so it will be skipped.")
                continue
            shutil.copy(rst, new_rst)


if __name__ == "__main__":
import jsonargparse
Expand Down
109 changes: 0 additions & 109 deletions docs/source-pytorch/accelerators/hpu_basic.rst

This file was deleted.

101 changes: 0 additions & 101 deletions docs/source-pytorch/accelerators/hpu_intermediate.rst

This file was deleted.

6 changes: 3 additions & 3 deletions docs/source-pytorch/advanced/model_parallel.rst
Original file line number Diff line number Diff line change
Expand Up @@ -58,11 +58,11 @@ Cutting-edge and third-party Strategies

Cutting-edge Lightning strategies are being developed by third-parties outside of Lightning.

If you want to try some of the latest and greatest features for model-parallel training, check out the :doc:`Colossal-AI Strategy <./third_party/colossalai>` integration.
If you want to try some of the latest and greatest features for model-parallel training, check out the :doc:`Colossal-AI Strategy <../integrations/strategies/colossalai>` integration.

Another integration is :doc:`Bagua Strategy <./third_party/bagua>`, deep learning training acceleration framework for PyTorch, with advanced distributed training algorithms and system optimizations.
Another integration is the :doc:`Bagua Strategy <../integrations/strategies/bagua>`, a deep learning training acceleration framework for PyTorch with advanced distributed training algorithms and system optimizations.

For training on unreliable mixed GPUs across the internet check out the :doc:`Hivemind Strategy <./third_party/hivemind>` integration.
For training on unreliable mixed GPUs across the internet check out the :doc:`Hivemind Strategy <../integrations/strategies/hivemind>` integration.

----

Expand Down
4 changes: 2 additions & 2 deletions docs/source-pytorch/common/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
Save memory with half-precision <precision>
../advanced/model_parallel
Train on single or multiple GPUs <../accelerators/gpu>
Train on single or multiple HPUs <../accelerators/hpu>
Train on single or multiple HPUs <../integrations/hpu/index>
Train on single or multiple IPUs <../accelerators/ipu>
Train on single or multiple TPUs <../accelerators/tpu>
Train on MPS <../accelerators/mps>
Expand Down Expand Up @@ -148,7 +148,7 @@ How-to Guides
.. displayitem::
:header: Train on single or multiple HPUs
:description: Train models faster with HPU accelerators
:button_link: ../accelerators/hpu.html
:button_link: ../integrations/hpu/index.html
:col_css: col-md-4
:height: 180

Expand Down
2 changes: 1 addition & 1 deletion docs/source-pytorch/common_usecases.rst
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ Customize and extend Lightning for things like custom hardware or distributed st
:header: Train on single or multiple HPUs
:description: Train models faster with HPUs.
:col_css: col-md-12
:button_link: accelerators/hpu.html
:button_link: integrations/hpu/index.html
:height: 100

.. displayitem::
Expand Down
Loading