Skip to content

Commit

Permalink
Support deeply nested protobuf objects in python (#10284)
Browse files Browse the repository at this point in the history
* Add recursion test to python

* Support calling SetAllowOversizeProtos on new protobuf library

* Add to CHANGELOG

* format

* lint

* lint
  • Loading branch information
Frassle committed Jul 29, 2022
1 parent 60481da commit 65ccad4
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 23 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG_PENDING.md
Expand Up @@ -7,3 +7,6 @@

- [codegen/nodejs] Correctly reference external enums.
[#10286](https://github.com/pulumi/pulumi/pull/10286)

- [sdk/python] Support deeply nested protobuf objects.
[#10284](https://github.com/pulumi/pulumi/pull/10284)
22 changes: 0 additions & 22 deletions sdk/python/lib/pulumi/runtime/invoke.py
Expand Up @@ -30,28 +30,6 @@
if TYPE_CHECKING:
from .. import Resource, Inputs, Output

# This setting overrides a hardcoded maximum protobuf size in the python protobuf bindings. This avoids deserialization
# exceptions on large gRPC payloads, but makes it possible to use enough memory to cause an OOM error instead [1].
# Note: We hit the default maximum protobuf size in practice when processing Kubernetes CRDs [2]. If this setting ends
# up causing problems, it should be possible to work around it with more intelligent resource chunking in the k8s
# provider.
#
# [1] https://github.com/protocolbuffers/protobuf/blob/0a59054c30e4f0ba10f10acfc1d7f3814c63e1a7/python/google/protobuf/pyext/message.cc#L2017-L2024
# [2] https://github.com/pulumi/pulumi-kubernetes/issues/984
#
# This setting requires a platform-specific and python version-specific .so file called
# `_message.cpython-[py-version]-[platform].so`, which is not present in situations when a new python version is
# released but the corresponding dist wheel has not been. So, we wrap the import in a try/except to avoid breaking all
# python programs using a new version.
# Best-effort: the C-extension module `_message.cpython-*.so` is absent on
# platforms/python versions without a prebuilt wheel, so tolerate a failed
# import rather than breaking every program on a new python release.
try:
    # pylint: disable-msg=E0611
    from google.protobuf.pyext._message import SetAllowOversizeProtos
except ImportError:
    pass
else:
    # Only reached when the C extension actually loaded.
    SetAllowOversizeProtos(True)


class InvokeResult:
"""
Expand Down
33 changes: 33 additions & 0 deletions sdk/python/lib/pulumi/runtime/rpc.py
Expand Up @@ -96,6 +96,39 @@

_INT_OR_FLOAT = six.integer_types + (float,)

# This setting overrides a hardcoded maximum protobuf size in the python protobuf bindings. This avoids deserialization
# exceptions on large gRPC payloads, but makes it possible to use enough memory to cause an OOM error instead [1].
# Note: We hit the default maximum protobuf size in practice when processing Kubernetes CRDs [2]. If this setting ends
# up causing problems, it should be possible to work around it with more intelligent resource chunking in the k8s
# provider.
#
# [1] https://github.com/protocolbuffers/protobuf/blob/0a59054c30e4f0ba10f10acfc1d7f3814c63e1a7/python/google/protobuf/pyext/message.cc#L2017-L2024
# [2] https://github.com/pulumi/pulumi-kubernetes/issues/984
#
# This setting requires a platform-specific and python version-specific .so file called
# `_message.cpython-[py-version]-[platform].so`, which is not present in situations when a new python version is
# released but the corresponding dist wheel has not been. So, we wrap the import in a try/except to avoid breaking all
# python programs using a new version.
# Best-effort: the C-extension module `_message.cpython-*.so` is absent on
# platforms/python versions without a prebuilt wheel, so tolerate a failed
# import rather than breaking every program on a new python release.
try:
    # pylint: disable-msg=C0412,E0611
    from google.protobuf.pyext._message import SetAllowOversizeProtos
except ImportError:
    pass
else:
    # Only reached when the C extension actually loaded.
    SetAllowOversizeProtos(True)

# Newer protobuf releases moved the entry point above: the C-extension
# handle is reached through google.protobuf.pyext.cpp_message instead of
# importing _message directly.
try:
    from google.protobuf.pyext import (
        cpp_message,
    )  # pylint: disable-msg=E0611

    # cpp_message._message is None when the pure-python implementation is
    # active; only call through when the C extension is actually loaded.
    if cpp_message._message is not None:
        cpp_message._message.SetAllowOversizeProtos(True)
except (ImportError, AttributeError):
    # ImportError: this protobuf version has a different module layout.
    # AttributeError: cpp_message imported but exposes no private
    # `_message` attribute (it is an internal name and not guaranteed) —
    # stay best-effort rather than break program startup, matching the
    # intent of the ImportError guard above.
    pass


def isLegalProtobufValue(value: Any) -> bool:
"""
Expand Down
3 changes: 3 additions & 0 deletions sdk/python/mypy.ini
Expand Up @@ -10,6 +10,9 @@ ignore_missing_imports = True
[mypy-google.protobuf.pyext._message]
ignore_missing_imports = True

[mypy-google.protobuf.pyext]
ignore_missing_imports = True

# grpc generated
[mypy-pulumi.runtime.proto.*]
ignore_errors = True
7 changes: 6 additions & 1 deletion tests/integration/large_resource/python/__main__.py
Expand Up @@ -3,5 +3,10 @@
# Create a very long string (>4mb) to exercise oversize protobuf payloads.
long_string = "a" * 5 * 1024 * 1025

# Create a deeply nested array (200 levels, comfortably past the >100-level
# mark) to exercise deeply recursive protobuf deserialization.
deep_array = []
for _ in range(200):
    deep_array = [deep_array]

# Export both values so the engine must round-trip them over gRPC.
pulumi.export("long_string", long_string)
pulumi.export("deep_array", deep_array)

0 comments on commit 65ccad4

Please sign in to comment.