Use docconvert to normalize docstring to Google style #5580

Merged (10 commits) on Nov 15, 2023
Changes from 5 commits
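For context, the Google docstring style that docconvert applies here places the one-line summary on the same line as the opening triple quotes and keeps the Args: section indented beneath it. A minimal before/after sketch with a hypothetical function, not taken from this diff:

# Before normalization: the summary sits on its own line under the opening quotes.
def scale(x: float, factor: float = 2.0) -> float:
    """
    Scale a value by a factor.

    Args:
        x: the value to scale.
        factor: multiplier applied to x.
    """
    return x * factor


# After normalization (Google style): the summary shares the line with the opening quotes.
def scale_google(x: float, factor: float = 2.0) -> float:
    """Scale a value by a factor.

    Args:
        x: the value to scale.
        factor: multiplier applied to x.
    """
    return x * factor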
6 changes: 2 additions & 4 deletions onnx/__init__.py
@@ -284,8 +284,7 @@ def save_model(
     size_threshold: int = 1024,
     convert_attribute: bool = False,
 ) -> None:
-    """
-    Saves the ModelProto to the specified path and optionally, serialize tensors with raw data as external data before saving.
+    """Saves the ModelProto to the specified path and optionally, serialize tensors with raw data as external data before saving.
 
     Args:
         proto: should be a in-memory ModelProto
@@ -332,8 +331,7 @@ def save_tensor(
     f: IO[bytes] | str | os.PathLike,
     format: _SupportedFormat | None = None,  # noqa: A002
 ) -> None:
-    """
-    Saves the TensorProto to the specified path.
+    """Saves the TensorProto to the specified path.
 
     Args:
         proto: should be a in-memory TensorProto
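As an aside, not part of the diff: the basic call shape of the save_model API whose docstring is reformatted above. This is a minimal sketch that assumes a model file exists at the hypothetical path and uses the save_as_external_data flag from the full signature, which is only partly visible in the hunk.

import onnx

model = onnx.load("model.onnx")  # hypothetical input path
# Write tensors whose raw data exceeds the size threshold to external data before saving.
onnx.save_model(
    model,
    "model_external.onnx",
    save_as_external_data=True,
    size_threshold=1024,
)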
17 changes: 6 additions & 11 deletions onnx/backend/base.py
@@ -14,18 +14,15 @@
 
 
 class DeviceType:
-    """
-    Describes device type.
-    """
+    """Describes device type."""
 
     _Type = NewType("_Type", int)
     CPU: _Type = _Type(0)
     CUDA: _Type = _Type(1)
 
 
 class Device:
-    """
-    Describes device type and device id
+    """Describes device type and device id
     syntax: device_type:device_id(optional)
     example: 'CPU', 'CUDA', 'CUDA:1'
     """
@@ -56,8 +53,7 @@ def getitem(self: Any, key: Any) -> Any:
 
 
 class BackendRep:
-    """
-    BackendRep is the handle that a Backend returns after preparing to execute
+    """BackendRep is the handle that a Backend returns after preparing to execute
     a model repeatedly. Users will then pass inputs to the run function of
     BackendRep to retrieve the corresponding results.
     """
@@ -68,8 +64,7 @@ def run(self, inputs: Any, **kwargs: Any) -> Tuple[Any, ...]:
 
 
 class Backend:
-    """
-    Backend is the entity that will take an ONNX model with inputs,
+    """Backend is the entity that will take an ONNX model with inputs,
     perform a computation, and then return the output.
 
     For one-off execution, users can use run_node and run_model to obtain results quickly.
@@ -112,6 +107,7 @@ def run_node(
         **kwargs: Dict[str, Any],
     ) -> Optional[Tuple[Any, ...]]:
         """Simple run one operator and return the results.
+
         Args:
             outputs_info: a list of tuples, which contains the element type and
                 shape of each output. First element of the tuple is the dtype, and
@@ -131,8 +127,7 @@ def run_node(
 
     @classmethod
     def supports_device(cls, device: str) -> bool:
-        """
-        Checks whether the backend is compiled with particular device support.
+        """Checks whether the backend is compiled with particular device support.
         In particular it's used in the testing suite.
         """
         return True
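supports_device is the hook the test suite uses to decide which device-specific cases a backend can run. A hypothetical subclass illustrating an override, not part of the diff; CPUOnlyBackend is an invented name:

from onnx.backend.base import Backend, Device, DeviceType


class CPUOnlyBackend(Backend):
    """Hypothetical backend that only claims CPU support."""

    @classmethod
    def supports_device(cls, device: str) -> bool:
        # The test suite consults this before running device-specific test cases.
        return Device(device).type == DeviceType.CPU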
1 change: 0 additions & 1 deletion onnx/backend/test/case/model/__init__.py
@@ -42,7 +42,6 @@ def expect(
 
 def collect_testcases() -> List[TestCase]:
     """Collect model test cases defined in python/numpy code."""
-
     real_model_testcases = []
 
     model_tests = [