Skip to content

Commit

Permalink
Fix incorrect usage in torch.distributed.rpc example (#112367)
Browse files Browse the repository at this point in the history
Fixes #112366
Pull Request resolved: #112367
Approved by: https://github.com/H-Huang
  • Loading branch information
littsk authored and pytorchmergebot committed Nov 1, 2023
1 parent 54c7d0d commit 623a311
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions torch/distributed/rpc/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -612,8 +612,8 @@ def remote(to, func, args=None, kwargs=None, timeout=UNSET_RPC_TIMEOUT):
>>> # On both workers:
>>> @torch.jit.script
>>> def my_script_add(t1, t2):
>>> return torch.add(t1, t2)
>>> def my_script_add(tensor: torch.Tensor, scalar: int):
>>> return torch.add(tensor, scalar)
>>> # On worker 0:
>>> import torch.distributed.rpc as rpc
Expand Down Expand Up @@ -793,8 +793,8 @@ def rpc_sync(to, func, args=None, kwargs=None, timeout: float = UNSET_RPC_TIMEOU
>>> # On both workers:
>>> @torch.jit.script
>>> def my_script_add(t1, t2):
>>> return torch.add(t1, t2)
>>> def my_script_add(tensor: torch.Tensor, scalar: int):
>>> return torch.add(tensor, scalar)
>>> # On worker 0:
>>> import torch.distributed.rpc as rpc
Expand Down Expand Up @@ -887,8 +887,8 @@ def rpc_async(to, func, args=None, kwargs=None, timeout=UNSET_RPC_TIMEOUT):
>>> # On both workers:
>>> @torch.jit.script
>>> def my_script_add(t1, t2):
>>> return torch.add(t1, t2)
>>> def my_script_add(tensor: torch.Tensor, scalar: int):
>>> return torch.add(tensor, scalar)
>>> # On worker 0:
>>> import torch.distributed.rpc as rpc
Expand Down

0 comments on commit 623a311

Please sign in to comment.