From 716ef1cf7dd59349fe69e1250550cfb4fa0064c1 Mon Sep 17 00:00:00 2001
From: JackCaoG
Date: Sat, 29 Aug 2020 02:01:09 +0000
Subject: [PATCH] Add all_to_all back to our doc

---
 docs/source/index.rst       | 1 +
 torch_xla/core/xla_model.py | 3 ---
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/docs/source/index.rst b/docs/source/index.rst
index 7ba2c48cee53..3698363e16e3 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -16,6 +16,7 @@ xla_model
 .. autofunction:: xrt_world_size
 .. autofunction:: all_reduce
 .. autofunction:: all_gather
+.. autofunction:: all_to_all
 .. autofunction:: add_step_closure
 .. autofunction:: wait_device_ops
 .. autofunction:: optimizer_step
diff --git a/torch_xla/core/xla_model.py b/torch_xla/core/xla_model.py
index 8187a6dba912..ed1d9e9a72cc 100644
--- a/torch_xla/core/xla_model.py
+++ b/torch_xla/core/xla_model.py
@@ -619,9 +619,6 @@ def all_to_all(value,
                groups=None):
   """Performs an XLA `AllToAll()` operation on the input tensor.
 
-  WARNING: This function is not very reliable, may produce wrong results under
-  certain inputs. Use it at your own risk.
-
   See: https://www.tensorflow.org/xla/operation_semantics#alltoall
 
   Args:
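
Note for reviewers (not part of the patch): below is a minimal usage sketch of the
`all_to_all` API whose docs this patch restores. The hunk above truncates the
signature, showing only `value` and `groups`; the sketch assumes the remaining
parameters are `split_dimension`, `concat_dimension`, and `split_count`, mirroring
the XLA `AllToAll()` semantics linked in the docstring, so treat those names and
the multiprocessing plumbing as illustrative rather than confirmed by this diff.

import torch
import torch_xla.core.xla_model as xm
import torch_xla.distributed.xla_multiprocessing as xmp


def _mp_fn(index):
  # Assumed signature: all_to_all(value, split_dimension, concat_dimension,
  # split_count, groups=None). The middle parameters are not visible in the
  # truncated diff above and are an assumption here.
  device = xm.xla_device()
  world_size = xm.xrt_world_size()
  # Each replica contributes a [world_size, 4] tensor; dimension 0 is split
  # across replicas and the received shards are concatenated back on dim 0.
  value = torch.full((world_size, 4), float(index), device=device)
  result = xm.all_to_all(
      value, split_dimension=0, concat_dimension=0, split_count=world_size)
  xm.mark_step()
  print('replica', index, result.cpu())


if __name__ == '__main__':
  xmp.spawn(_mp_fn, args=())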