Component reduce #2480

Merged
merged 4 commits on Jan 21, 2024
22 changes: 22 additions & 0 deletions gdsfactory/component.py
@@ -65,6 +65,7 @@
Layers,
LayerSpec,
PathType,
Tuple,
)

valid_plotters = ["matplotlib", "klayout", "kweb"]
@@ -2596,8 +2597,29 @@ def metadata(self) -> dict:
)
return dict(self.settings)

def __reduce__(self):
"""Gdstk Cells cannot be directly pickled. This method overrides binary serialization with GDS serialization."""
return deserialize_gds, serialize_gds(self)


# Component functions
def serialize_gds(component: Component) -> Tuple[PathType]:
"""Saves Component as GDS + YAML metadata in temporary files with unique name."""
gds_filepath = GDSDIR_TEMP / component.name
Collaborator (Author):
@joamatab should this use something else than the name to guarantee there are no collisions?

Contributor:

I think name is the best we can use :)
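One way to guarantee uniqueness would be to append a unique suffix to the temporary filename. A minimal sketch of that alternative (the helper name unique_gds_path is hypothetical; the PR itself keeps the plain component name):

import uuid
from pathlib import Path


def unique_gds_path(tmp_dir: Path, name: str) -> Path:
    # Hypothetical variant of the line above: a uuid suffix means two different
    # components that happen to share a name cannot overwrite each other's files.
    return tmp_dir / f"{name}_{uuid.uuid4().hex}.gds"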

gds_filepath = gds_filepath.with_suffix(".gds")
component.write_gds(gds_filepath, with_metadata=True)
return (gds_filepath,)


def deserialize_gds(gds_filepath: PathType) -> Component:
"""Loads Component as GDS + YAML metadata from temporary files, and deletes them."""
from gdsfactory.read import import_gds

c = import_gds(gds_filepath, read_metadata=True)
metadata_filepath = gds_filepath.with_suffix(".yml")
metadata_filepath.unlink()
gds_filepath.unlink()
return c


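For context on how the pieces above fit together: pickle calls __reduce__ and receives (deserialize_gds, (gds_filepath,)); dumping therefore writes the temporary GDS + YAML files, and loading calls deserialize_gds(gds_filepath), which re-imports the component and removes both files. A minimal in-process sketch, using gf.components.straight purely as an example component:

import pickle

import gdsfactory as gf

# Any component works; straight() is just an example factory.
c1 = gf.components.straight()

# pickle.dumps calls c1.__reduce__(), which writes <name>.gds and <name>.yml
# into the temporary directory and records (deserialize_gds, (gds_filepath,)).
data = pickle.dumps(c1)

# pickle.loads calls deserialize_gds(gds_filepath), which re-imports the GDS
# together with its YAML metadata and then unlinks both temporary files.
c2 = pickle.loads(data)

assert c1.name == c2.name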
63 changes: 63 additions & 0 deletions tests/test_component_pickle.py
@@ -0,0 +1,63 @@
from __future__ import annotations

import os
import pickle

import gdsfactory as gf


def ports_are_equal(port1, port2) -> bool:
return (
port1.x == port2.x
and port1.y == port2.y
and port1.width == port2.width
and port1.orientation == port2.orientation
and port1.layer == port2.layer
)


def components_are_equal(component1, component2) -> bool:
# Compare the basic metadata
if component1.name != component2.name or component1.settings != component2.settings:
return False

# Compare the ports
if set(component1.ports.keys()) != set(component2.ports.keys()):
return False
for port_name in component1.ports:
if not ports_are_equal(
component1.ports[port_name], component2.ports[port_name]
):
return False

# Compare the polygons
polygons1 = component1.get_polygons(by_spec=True)
polygons2 = component2.get_polygons(by_spec=True)
if set(polygons1.keys()) != set(polygons2.keys()):
return False
for spec in polygons1.keys():
if not all(
(p1 == p2).all() for p1, p2 in zip(polygons1[spec], polygons2[spec])
):
return False

# If all checks passed, the components are considered equal
return True


# Round-trip a component through pickle and check it survives intact
def test_component_pickle() -> None:
c1 = gf.components.straight()
with open("test.pkl", "wb") as f:
pickle.dump(c1, f)
with open("test.pkl", "rb") as f:
c2 = pickle.load(f)
os.remove("test.pkl")

assert components_are_equal(
c1, c2
), "The components are not equal after pickling and unpickling."


if __name__ == "__main__":
test_component_pickle()