diff --git a/.bandit-baseline.json b/.bandit-baseline.json new file mode 100644 index 00000000..83610b62 --- /dev/null +++ b/.bandit-baseline.json @@ -0,0 +1,1091 @@ +{ + "errors": [], + "generated_at": "2025-11-22T19:54:06Z", + "metrics": { + "_totals": { + "CONFIDENCE.HIGH": 2, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 6, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 7, + "SEVERITY.UNDEFINED": 0, + "loc": 22117, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/abstractions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 448, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 3, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/factory.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 454, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/http/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/http/consumed_interactions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 410, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/mqtt/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/mqtt/consumed_interactions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 83, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/proxy.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 494, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/zmq/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + 
"SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 0, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/client/zmq/consumed_interactions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 496, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/config.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 157, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/constants.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 90, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 5, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/actions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 340, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/dataklasses.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 127, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/events.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 119, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/logger.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 262, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/meta.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 773, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/properties.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2315, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/property.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 303, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/state_machine.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 316, + "nosec": 0, + 
"skipped_tests": 0 + }, + "hololinked/core/thing.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 1, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 1, + "SEVERITY.UNDEFINED": 0, + "loc": 332, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/zmq/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 10, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/zmq/brokers.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2221, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/zmq/message.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 576, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/core/zmq/rpc_server.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 2, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 2, + "SEVERITY.UNDEFINED": 0, + "loc": 774, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 30, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/logger.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 69, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 55, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/exceptions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 44, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/extensions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 156, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/ipython.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 264, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/logger.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 35, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/parameterized.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + 
"CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 1788, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/parameters.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2622, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/serializer.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 268, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/param/utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 68, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/schema_validators/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 2, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/schema_validators/json_schema.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 87, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/schema_validators/validators.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 89, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/serializers/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 9, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/serializers/payloads.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 47, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/serializers/serializers.py": { + "CONFIDENCE.HIGH": 2, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 1, + "SEVERITY.MEDIUM": 1, + "SEVERITY.UNDEFINED": 0, + "loc": 540, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 91, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/http/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 3, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 3, + "SEVERITY.UNDEFINED": 0, + "loc": 684, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/http/handlers.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, 
+ "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 809, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/mqtt.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 170, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/security.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 127, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/server.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 82, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 88, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/server/zmq.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 175, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/storage/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 28, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/storage/config_models.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 89, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/storage/database.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 569, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/storage/json_storage.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 144, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/__init__.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 7, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/base.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 40, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/data_schema.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 392, + "nosec": 0, + 
"skipped_tests": 0 + }, + "hololinked/td/forms.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 58, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/interaction_affordance.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 404, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/metadata.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 15, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/pydantic_extensions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 165, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/security_definitions.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 18, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/tm.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 124, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/td/utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 16, + "nosec": 0, + "skipped_tests": 0 + }, + "hololinked/utils.py": { + "CONFIDENCE.HIGH": 0, + "CONFIDENCE.LOW": 0, + "CONFIDENCE.MEDIUM": 0, + "CONFIDENCE.UNDEFINED": 0, + "SEVERITY.HIGH": 0, + "SEVERITY.LOW": 0, + "SEVERITY.MEDIUM": 0, + "SEVERITY.UNDEFINED": 0, + "loc": 536, + "nosec": 0, + "skipped_tests": 0 + } + }, + "results": [ + { + "code": "268 port: int = 8080,\n269 address: str = \"0.0.0.0\",\n270 # host: str = None,\n271 allowed_clients: str | typing.Iterable[str] | None = None,\n272 ssl_context: ssl.SSLContext | None = None,\n273 # protocol_version : int = 1,\n274 # network_interface : str = 'Ethernet',\n275 forked: bool = False, # used by forkable decorator\n276 **kwargs: typing.Dict[str, typing.Any],\n277 ) -> None:\n278 \"\"\"\n279 Quick-start to serve `Thing` over HTTP. 
This method is fully blocking.\n", + "col_offset": 23, + "end_col_offset": 32, + "filename": "hololinked/core/thing.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 605, + "link": "https://cwe.mitre.org/data/definitions/605.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Possible binding to all interfaces.", + "line_number": 269, + "line_range": [ + 267, + 268, + 269, + 270, + 271, + 272, + 273, + 274, + 275, + 276 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/plugins/b104_hardcoded_bind_all_interfaces.html", + "test_id": "B104", + "test_name": "hardcoded_bind_all_interfaces" + }, + { + "code": "681 req_rep_socket_address = req_rep_socket_address.replace(\"*\", socket.gethostname()).replace(\n682 \"0.0.0.0\", socket.gethostname()\n683 )\n684 pub_sub_socket_address = self.tcp_event_publisher.socket_address # type: str\n685 pub_sub_socket_address = pub_sub_socket_address.replace(\"*\", socket.gethostname()).replace(\n", + "col_offset": 16, + "end_col_offset": 25, + "filename": "hololinked/core/zmq/rpc_server.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 605, + "link": "https://cwe.mitre.org/data/definitions/605.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Possible binding to all interfaces.", + "line_number": 682, + "line_range": [ + 681, + 682, + 683 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/plugins/b104_hardcoded_bind_all_interfaces.html", + "test_id": "B104", + "test_name": "hardcoded_bind_all_interfaces" + }, + { + "code": "685 pub_sub_socket_address = pub_sub_socket_address.replace(\"*\", socket.gethostname()).replace(\n686 \"0.0.0.0\", socket.gethostname()\n687 )\n688 else:\n689 raise ValueError(f\"Unsupported protocol '{protocol}' for ZMQ.\")\n", + "col_offset": 16, + "end_col_offset": 25, + "filename": "hololinked/core/zmq/rpc_server.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 605, + "link": "https://cwe.mitre.org/data/definitions/605.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Possible binding to all interfaces.", + "line_number": 686, + "line_range": [ + 685, + 686, + 687 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/plugins/b104_hardcoded_bind_all_interfaces.html", + "test_id": "B104", + "test_name": "hardcoded_bind_all_interfaces" + }, + { + "code": "34 # serializers:\n35 import pickle\n36 import typing\n", + "col_offset": 0, + "end_col_offset": 13, + "filename": "hololinked/serializers/serializers.py", + "issue_confidence": "HIGH", + "issue_cwe": { + "id": 502, + "link": "https://cwe.mitre.org/data/definitions/502.html" + }, + "issue_severity": "LOW", + "issue_text": "Consider possible security implications associated with pickle module.", + "line_number": 35, + "line_range": [ + 35 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/blacklists/blacklist_imports.html#b403-import-pickle", + "test_id": "B403", + "test_name": "blacklist" + }, + { + "code": "216 if global_config.ALLOW_PICKLE:\n217 return pickle.loads(self.convert_to_bytes(data))\n218 raise RuntimeError(\"Pickle deserialization is not allowed by the global configuration\")\n", + "col_offset": 19, + "end_col_offset": 60, + "filename": "hololinked/serializers/serializers.py", + "issue_confidence": "HIGH", + "issue_cwe": { + "id": 502, + "link": "https://cwe.mitre.org/data/definitions/502.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Pickle and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue.", + "line_number": 217, + 
"line_range": [ + 217 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/blacklists/blacklist_calls.html#b301-pickle", + "test_id": "B301", + "test_name": "blacklist" + }, + { + "code": "63 \n64 address = IPAddress(default=\"0.0.0.0\", doc=\"IP address\") # type: str\n65 \"\"\"IP address, especially to bind to all interfaces or not\"\"\"\n", + "col_offset": 32, + "end_col_offset": 41, + "filename": "hololinked/server/http/__init__.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 605, + "link": "https://cwe.mitre.org/data/definitions/605.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Possible binding to all interfaces.", + "line_number": 64, + "line_range": [ + 64 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/plugins/b104_hardcoded_bind_all_interfaces.html", + "test_id": "B104", + "test_name": "hardcoded_bind_all_interfaces" + }, + { + "code": "126 port: int = 8080,\n127 address: str = \"0.0.0.0\",\n128 things: typing.Optional[typing.List[Thing]] = None,\n129 # host: typing.Optional[str] = None,\n130 logger: typing.Optional[logging.Logger] = None,\n131 log_level: int = logging.INFO,\n132 ssl_context: typing.Optional[ssl.SSLContext] = None,\n133 security_schemes: typing.Optional[typing.List[Security]] = None,\n134 # protocol_version : int = 1, network_interface : str = 'Ethernet',\n135 allowed_clients: typing.Optional[typing.Union[str, typing.Iterable[str]]] = None,\n136 config: typing.Optional[dict[str, typing.Any]] = None,\n137 **kwargs,\n138 ) -> None:\n139 \"\"\"\n140 Parameters\n141 ----------\n", + "col_offset": 23, + "end_col_offset": 32, + "filename": "hololinked/server/http/__init__.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 605, + "link": "https://cwe.mitre.org/data/definitions/605.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Possible binding to all interfaces.", + "line_number": 127, + "line_range": [ + 124, + 125, + 126, + 127, + 128, + 129, + 130, + 131, + 132, + 133, + 134, + 135, + 136, + 137 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/plugins/b104_hardcoded_bind_all_interfaces.html", + "test_id": "B104", + "test_name": "hardcoded_bind_all_interfaces" + }, + { + "code": "702 return f\"{protocol}://{socket.gethostname()}{port}\"\n703 if self.server.address == \"0.0.0.0\" or self.server.address == \"127.0.0.1\":\n704 return f\"{protocol}://127.0.0.1{port}\"\n", + "col_offset": 34, + "end_col_offset": 43, + "filename": "hololinked/server/http/__init__.py", + "issue_confidence": "MEDIUM", + "issue_cwe": { + "id": 605, + "link": "https://cwe.mitre.org/data/definitions/605.html" + }, + "issue_severity": "MEDIUM", + "issue_text": "Possible binding to all interfaces.", + "line_number": 703, + "line_range": [ + 703 + ], + "more_info": "https://bandit.readthedocs.io/en/1.9.1/plugins/b104_hardcoded_bind_all_interfaces.html", + "test_id": "B104", + "test_name": "hardcoded_bind_all_interfaces" + } + ] +} \ No newline at end of file diff --git a/.github/workflows/ci-pipeline.yml b/.github/workflows/ci-pipeline.yml index 5681d70b..6e4eca9f 100644 --- a/.github/workflows/ci-pipeline.yml +++ b/.github/workflows/ci-pipeline.yml @@ -29,9 +29,39 @@ jobs: - name: run ruff linter run: ruff check hololinked + scan: + name: security scan with bandit + runs-on: ubuntu-latest + needs: codestyle + + steps: + - name: checkout code + uses: actions/checkout@v4 + + - name: set up python 3.11 + uses: actions/setup-python@v3 + with: + python-version: 3.11 + + - name: install bandit + run: pip install bandit + + - 
name: run bandit security scan + run: | + bandit -c pyproject.toml -r hololinked/ -b .bandit-baseline.json + echo "----------------------------" + echo "Rerunning to generate bandit report in JSON format..." + bandit -c pyproject.toml -r hololinked/ -f json -b .bandit-baseline.json -o bandit-report.json + + - name: upload bandit report artifact + uses: actions/upload-artifact@v4 + with: + name: bandit-security-scan-report + path: bandit-report.json + test: name: unit-integration tests - needs: codestyle + needs: scan strategy: matrix: diff --git a/hololinked/client/factory.py b/hololinked/client/factory.py index f5ebc522..31459d9f 100644 --- a/hololinked/client/factory.py +++ b/hololinked/client/factory.py @@ -1,37 +1,37 @@ +import base64 +import ssl import threading import uuid -import base64 import warnings +from typing import Any + import aiomqtt import httpx -import ssl import structlog -from typing import Any -from paho.mqtt.client import Client as PahoMQTTClient, MQTTProtocolVersion, CallbackAPIVersion, MQTTMessage - +from paho.mqtt.client import CallbackAPIVersion, MQTTMessage, MQTTProtocolVersion +from paho.mqtt.client import Client as PahoMQTTClient -from ..core import Thing, Action -from ..core.zmq import SyncZMQClient, AsyncZMQClient +from ..constants import ZMQ_TRANSPORTS +from ..core import Thing +from ..core.zmq import AsyncZMQClient, SyncZMQClient +from ..serializers import Serializers from ..td.interaction_affordance import ( - PropertyAffordance, ActionAffordance, EventAffordance, + PropertyAffordance, ) -from ..serializers import Serializers from ..utils import set_global_event_loop_policy -from ..constants import ZMQ_TRANSPORTS -from .abstractions import ConsumedThingAction, ConsumedThingProperty, ConsumedThingEvent +from .abstractions import ConsumedThingAction, ConsumedThingEvent, ConsumedThingProperty +from .http.consumed_interactions import HTTPAction, HTTPEvent, HTTPProperty +from .mqtt.consumed_interactions import MQTTConsumer # only one type for now from .proxy import ObjectProxy -from .http.consumed_interactions import HTTPProperty, HTTPAction, HTTPEvent from .zmq.consumed_interactions import ( + ReadMultipleProperties, + WriteMultipleProperties, ZMQAction, ZMQEvent, ZMQProperty, - WriteMultipleProperties, - ReadMultipleProperties, ) -from .mqtt.consumed_interactions import MQTTConsumer # only one type for now - set_global_event_loop_policy() @@ -103,7 +103,7 @@ def zmq( async_zmq_client = AsyncZMQClient(f"{id}|async", server_id=server_id, logger=logger, access_point=access_point) # Fetch the TD - assert isinstance(Thing.get_thing_model, Action) + Thing.get_thing_model # type: Action FetchTDAffordance = Thing.get_thing_model.to_affordance() FetchTDAffordance.override_defaults(name="get_thing_description", thing_id=thing_id) FetchTD = ZMQAction( diff --git a/hololinked/core/properties.py b/hololinked/core/properties.py index 2caddffb..146f85f1 100644 --- a/hololinked/core/properties.py +++ b/hololinked/core/properties.py @@ -156,7 +156,7 @@ class IPAddress(Property): def __init__( self, - default: typing.Optional[str] = "0.0.0.0", + default: typing.Optional[str] = "127.0.0.1", *, allow_ipv4: bool = True, allow_ipv6: bool = True, @@ -512,7 +512,6 @@ def _crop_to_bounds(self, value: typing.Union[int, float]) -> typing.Union[int, """ # Values outside the bounds are silently cropped to # be inside the bounds. 
- assert self.bounds is not None, "Cannot crop to bounds when bounds is None" vmin, vmax = self.bounds incmin, incmax = self.inclusive_bounds if vmin is not None: diff --git a/hololinked/core/property.py b/hololinked/core/property.py index 7c4fcab5..4ef80ccf 100644 --- a/hololinked/core/property.py +++ b/hololinked/core/property.py @@ -1,10 +1,10 @@ import typing from enum import Enum -from ..param.parameterized import Parameter, Parameterized, ParameterizedMetaclass -from ..utils import issubklass from ..exceptions import StateMachineError +from ..param.parameterized import Parameter, Parameterized, ParameterizedMetaclass from ..schema_validators import JSONSchemaValidator +from ..utils import issubklass from .dataklasses import RemoteResourceInfoValidator from .events import Event, EventDispatcher # noqa: F401 @@ -264,11 +264,6 @@ def external_set(self, obj: Parameterized, value: typing.Any) -> None: def _post_value_set(self, obj, value: typing.Any) -> None: if (self.db_persist or self.db_commit) and hasattr(obj, "db_engine"): - from .thing import Thing - - assert isinstance(obj, Thing), ( - f"database property {self.name} bound to a non Thing, currently not supported" - ) obj.db_engine.set_property(self, value) self.push_change_event(obj, value) return super()._post_value_set(obj, value) @@ -330,7 +325,7 @@ def to_affordance(self, owner_inst=None): try: - from pydantic import BaseModel, RootModel, create_model, ConfigDict + from pydantic import BaseModel, ConfigDict, RootModel, create_model def wrap_plain_types_in_rootmodel(model: type) -> type[BaseModel] | type[RootModel]: """ diff --git a/hololinked/core/state_machine.py b/hololinked/core/state_machine.py index 2e988884..40037482 100644 --- a/hololinked/core/state_machine.py +++ b/hololinked/core/state_machine.py @@ -1,14 +1,14 @@ import typing +from enum import Enum, EnumMeta, StrEnum from types import FunctionType, MethodType -from enum import EnumMeta, Enum, StrEnum -from ..param import edit_constant from ..exceptions import StateMachineError +from ..param import edit_constant +from .actions import Action +from .meta import ThingMeta +from .properties import Boolean, ClassSelector, TypedDict from .property import Property -from .properties import ClassSelector, TypedDict, Boolean from .thing import Thing -from .meta import ThingMeta -from .actions import Action class StateMachine: @@ -364,7 +364,6 @@ def machine(self): def prepare_object_FSM(instance: Thing) -> None: """validate and prepare the state machine attached to a Thing class""" - assert isinstance(instance, Thing), "state machine can only be attached to a Thing class." 
cls = instance.__class__ if cls.state_machine and isinstance(cls.state_machine, StateMachine): cls.state_machine.validate(instance) diff --git a/hololinked/core/zmq/brokers.py b/hololinked/core/zmq/brokers.py index 89e4db1d..688516a9 100644 --- a/hololinked/core/zmq/brokers.py +++ b/hololinked/core/zmq/brokers.py @@ -1,20 +1,21 @@ +import asyncio import os +import threading import time +import typing import warnings +from enum import Enum + +import structlog import zmq import zmq.asyncio -import asyncio -import threading -import typing -import structlog -from enum import Enum from zmq.utils.monitor import parse_monitor_message -from ...utils import format_exception_as_json, run_callable_somehow, uuid_hex, get_current_async_loop from ...config import global_config from ...constants import ZMQ_EVENT_MAP, ZMQ_TRANSPORTS, get_socket_type_name -from ...serializers.serializers import Serializers from ...exceptions import BreakLoop +from ...serializers.serializers import Serializers +from ...utils import format_exception_as_json, get_current_async_loop, run_callable_somehow, uuid_hex from .message import ( ERROR, EXIT, @@ -24,14 +25,14 @@ SERVER_DISCONNECTED, TIMEOUT, EventMessage, + PreserializedData, + PreserializedEmptyByte, RequestMessage, ResponseMessage, SerializableData, - PreserializedData, + SerializableNone, ServerExecutionContext, ThingExecutionContext, - SerializableNone, - PreserializedEmptyByte, default_server_execution_context, default_thing_execution_context, ) @@ -119,7 +120,8 @@ def get_socket( RuntimeError if transport is `TCP` and a socket connection from client side is requested but a socket address is not supplied """ - assert node_type.lower() in ["server", "client"], f"Invalid node_type: {node_type}" + if node_type.lower() not in ["server", "client"]: + raise ValueError(f"Invalid node_type: {node_type}") bind = node_type.lower() == "server" if len(access_point) == 3 or len(access_point) == 6 or isinstance(access_point, Enum): transport = access_point @@ -929,11 +931,8 @@ def exit(self) -> None: BaseZMQ.exit(self) self.poller.unregister(self.socket) # TODO - there is some issue here while quitting - # print("poller exception did not occur 1") if self._monitor_socket is not None: - # print("poller exception did not occur 2") self.poller.unregister(self._monitor_socket) - # print("poller exception did not occur 3") except Exception as ex: # noqa # TODO log message and undo noqa # raises a weird key error for some reason @@ -941,8 +940,7 @@ def exit(self) -> None: # unable to deregister from poller - - KeyError # unable to deregister from poller - - KeyError # unable to deregister from poller - - KeyError - # self.logger.warning(f"unable to deregister from poller - {str(ex)} - {type(ex).__name__}") - pass + self.logger.warning(f"unable to deregister socket from poller - {str(ex)} - {type(ex).__name__}") try: if self._monitor_socket is not None: self._monitor_socket.close(0) @@ -1151,7 +1149,7 @@ def recv_response(self, message_id: bytes) -> ResponseMessage: # put the expected message in response message cache # 2. 
also release the lock in every iteration because a message may be added in response cache # and may not return the method, which means the loop will run again and the lock needs to reacquired - pass + self.logger.warning(f"could not release poller lock for recv_response - {str(ex)}") def execute( self, @@ -1462,8 +1460,8 @@ async def async_recv_response(self, message_id: str) -> typing.List[ResponseMess finally: try: self._poller_lock.release() - except Exception: - pass + except Exception as ex: + self.logger.warning(f"could not release poller lock for async_recv_response - {str(ex)}") async def async_execute( self, @@ -2145,9 +2143,6 @@ def register(self, event: "EventDispatcher") -> None: `Event` object that needs to be registered. Events created at `__init__()` of `Thing` are automatically registered. """ - from ...core.events import EventDispatcher - - assert isinstance(event, EventDispatcher), "event must be an instance of EventDispatcher" if event._unique_identifier in self.events and event not in self.events: raise AttributeError(f"event {event._unique_identifier} already registered, please use another name.") self.event_ids.add(event._unique_identifier) @@ -2215,8 +2210,8 @@ def publish(self, event, data: typing.Any) -> None: finally: try: self._send_lock.release() - except Exception: - pass + except Exception as ex: + self.logger.warning(f"could not release publish lock for event publisher - {str(ex)}") def exit(self): try: @@ -2282,7 +2277,7 @@ def __init__( id=id, event_id=event_unique_identifier, ) - self.logger = logger + self.logger = logger # type: structlog.stdlib.BoundLogger self.create_socket( server_id=id, socket_id=id, @@ -2336,10 +2331,7 @@ def exit(self): self.poller.unregister(self.interruptor) except Exception as ex: # noqa # TODO - log message and undo noqa - # self.logger.warning("could not properly terminate socket or attempted to terminate an already terminated socket of event consuming socket at address '{}'. 
Exception message: {}".format( - # self.socket_address, str(E))) - # above line prints too many warnings - pass + self.logger.warning(f"could not unregister sockets from poller for event consumer - {str(ex)}") try: self.socket.close(0) self.interruptor.close(0) @@ -2395,8 +2387,8 @@ def receive( finally: try: self._poller_lock.release() - except Exception: - pass + except Exception as ex: + self.logger.warning(f"could not release poller lock for event receive - {str(ex)}") def interrupt(self): """ @@ -2459,8 +2451,8 @@ async def receive( finally: try: self._poller_lock.release() - except Exception: - pass + except Exception as ex: + self.logger.warning(f"could not release poller lock for event receive - {str(ex)}") async def interrupt(self): """ diff --git a/hololinked/core/zmq/rpc_server.py b/hololinked/core/zmq/rpc_server.py index 23c5f97c..0a47453c 100644 --- a/hololinked/core/zmq/rpc_server.py +++ b/hololinked/core/zmq/rpc_server.py @@ -1,26 +1,32 @@ +import asyncio import copy +import logging import socket -import zmq -import zmq.asyncio -import asyncio -import typing import threading -import logging import tracemalloc -import structlog +import typing from collections import deque +import structlog +import zmq +import zmq.asyncio -from ...exceptions import BreakLoop, BreakInnerLoop +from ...config import global_config from ...constants import ZMQ_TRANSPORTS, Operations +from ...exceptions import BreakInnerLoop, BreakLoop +from ...serializers import BaseSerializer, Serializers from ...utils import ( format_exception_as_json, get_all_sub_things_recusively, get_current_async_loop, set_global_event_loop_policy, ) -from ...config import global_config -from ...serializers import Serializers, BaseSerializer +from ..actions import BoundAction # noqa: F401 +from ..logger import LogHistoryHandler +from ..properties import TypedList +from ..property import Property # noqa: F401 +from ..thing import Thing +from .brokers import AsyncZMQServer, BaseZMQServer, EventPublisher from .message import ( EMPTY_BYTE, ERROR, @@ -29,13 +35,6 @@ RequestMessage, SerializableData, ) -from .brokers import AsyncZMQServer, BaseZMQServer, EventPublisher -from ..thing import Thing -from ..property import Property # noqa: F401 -from ..properties import TypedList -from ..actions import BoundAction # noqa: F401 -from ..logger import LogHistoryHandler - if global_config.TRACE_MALLOC: tracemalloc.start() @@ -143,7 +142,6 @@ def add_thing(self, thing: Thing) -> None: # setup scheduling requirements all_things = get_all_sub_things_recusively(thing) for instance in all_things: - assert isinstance(instance, Thing), "instance must be of type Thing" instance.rpc_server = self for action in instance.actions.descriptors.values(): if action.execution_info.iscoroutine and not action.execution_info.synchronous: @@ -661,7 +659,7 @@ def get_thing_description( """ TM = instance.get_thing_model(ignore_errors=ignore_errors, skip_names=skip_names).json() # type: dict[str, typing.Any] TD = copy.deepcopy(TM) - from ...td import PropertyAffordance, ActionAffordance, EventAffordance + from ...td import ActionAffordance, EventAffordance, PropertyAffordance from ...td.forms import Form if protocol.lower() == "inproc": diff --git a/hololinked/param/__init__.py b/hololinked/param/__init__.py index 110385f0..977b3b43 100644 --- a/hololinked/param/__init__.py +++ b/hololinked/param/__init__.py @@ -65,12 +65,3 @@ from .logger import get_logger, logging_level, VERBOSE -# Determine up-to-date version information, if possible, but with a -# safe 
fallback to ensure that this file and parameterized.py are the -# only two required files. -try: - from .version import Version - - __version__ = str(Version(fpath=__file__, archive_commit="$Format:%h$", reponame="param")) -except: - __version__ = "0.0.0+unknown" diff --git a/hololinked/param/_async.py b/hololinked/param/_async.py deleted file mode 100644 index 85587a7c..00000000 --- a/hololinked/param/_async.py +++ /dev/null @@ -1,22 +0,0 @@ -""" -Module that implements asyncio.coroutine function wrappers to be used -by param internal callbacks. These are defined in a separate file due -to py2 incompatibility with both `async/await` and `yield from` syntax. -""" - -# import asyncio - -# def generate_depends(func): -# @asyncio.coroutine -# def _depends(*args, **kw): -# yield from func(*args, **kw) # noqa: E999 -# return _depends - - -# def generate_callback(func, dependencies, kw): -# @asyncio.coroutine -# def cb(*events): -# args = (getattr(dep.owner, dep.name) for dep in dependencies) -# dep_kwargs = {n: getattr(dep.owner, dep.name) for n, dep in kw.items()} -# yield from func(*args, **dep_kwargs) # noqa: E999 -# return cb diff --git a/hololinked/param/exceptions.py b/hololinked/param/exceptions.py index 073e06bc..44a10614 100644 --- a/hololinked/param/exceptions.py +++ b/hololinked/param/exceptions.py @@ -1,19 +1,7 @@ -import textwrap import typing from contextlib import contextmanager -def wrap_error_text(text: str) -> str: - # return T.wrap(text) - #'\n'+'\n'.join([line.lstrip() - return textwrap.fill( - text=textwrap.dedent(text).lstrip(), - initial_indent="\n", - expand_tabs=True, - replace_whitespace=True, - ) - - def raise_TypeError(message, parameter) -> typing.NoReturn: owner_str = "" if isinstance(parameter, Parameter): @@ -66,4 +54,4 @@ def exceptions_summarized(): from .parameterized import Parameter -__all__ = ["wrap_error_text", "raise_TypeError", "raise_ValueError", "get_iterable_printfriendly_repr"] +__all__ = ["raise_TypeError", "raise_ValueError", "get_iterable_printfriendly_repr"] diff --git a/hololinked/param/parameterized.py b/hololinked/param/parameterized.py index 16e3de94..177d24ec 100644 --- a/hololinked/param/parameterized.py +++ b/hololinked/param/parameterized.py @@ -42,7 +42,7 @@ import numpy as np dt_types = dt_types + (np.datetime64,) -except: +except ImportError: pass # External components can register an async executor which will run @@ -719,9 +719,6 @@ class SortedDependencies: dynamic: typing.List[DynamicDependencyInfo] = field(default_factory=list) def __iadd__(self, other: "SortedDependencies") -> "SortedDependencies": - assert isinstance(other, SortedDependencies), wrap_error_text( - f"Can only add other ResolvedDepedency types to iteself, given type {type(other)}" - ) self.static += other.static self.dynamic += other.dynamic return self @@ -746,12 +743,9 @@ def decorator(func): for dep in deps: if not isinstance(dep, (str, Parameter)): raise ValueError( - wrap_error_text( - f"""The depends_on decorator only accepts string types referencing a parameter or parameter - instances, found {type(dep).__name__} type instead.""" - ) + f"The depends_on decorator only accepts string types referencing a parameter or parameter " + + f"instances, found {type(dep).__name__} type instead." 
) - _dinfo = GeneralDependencyInfo(dependencies=deps, queued=queued, on_init=on_init, invoke=invoke) if hasattr(func, "param_dependency_info") and not isinstance(func.param_dependency_info, GeneralDependencyInfo): raise TypeError(f"attribute 'param_depency_info' reserved by param library, please use another name.") @@ -998,15 +992,17 @@ def create_unresolved_watcher_info(self, owner_class_members: dict): for mcs_super in classlist(self.owner_cls)[:-1][::-1]: if isinstance(mcs_super, ParameterizedMetaclass): for dep in mcs_super.parameters.event_resolver._unresolved_watcher_info: # type: ignore - why doesnt it work? - assert isinstance(dep, UnresolvedWatcherInfo), wrap_error_text( # dummy assertion to check types - f"""Parameters._unresolved_watcher_info only accept UnresolvedWatcherInfo type, given type {type(dep)}""" - ) + if not isinstance(dep, UnresolvedWatcherInfo): + raise TypeError( + f"Parameters._unresolved_watcher_info only accept UnresolvedWatcherInfo type, given type {type(dep)}" + ) method = getattr(mcs_super, dep.method_name, None) if method is not None and hasattr(method, "param_dependency_info"): - assert isinstance(method.param_dependency_info, GeneralDependencyInfo), wrap_error_text( - f"""attribute 'param_depency_info' reserved by param library, - please use another name for your attributes of type {type(method.param_dependency_info)}.""" - ) # dummy assertion to check types + if not isinstance(method.param_dependency_info, GeneralDependencyInfo): + raise TypeError( + f"attribute 'param_depency_info' reserved by param library, " + + f"please use another name for your attributes of type {type(method.param_dependency_info)}." + ) dinfo: GeneralDependencyInfo = method.param_dependency_info if not any(dep.method_name == w.method_name for w in _watch + _inherited) and dinfo.invoke: _inherited.append(dep) @@ -1078,8 +1074,8 @@ def convert_notation_to_dependency_info( cls = depended_obj_notation.owner if not isinstance(cls, ParameterizedMetaclass): raise TypeError( - wrap_error_text("""Currently dependencies of a parameter from another class except a subclass - of parameterized is not supported""") + "Currently dependencies of a parameter from another class " + + "except a subclass of parameterized is not supported." ) info = ParameterDependencyInfo( inst=inst, @@ -1122,8 +1118,11 @@ def convert_notation_to_dependency_info( cls = (src, None) if isinstance(src, type) else (type(src), src) if attr == "parameters": - assert isinstance(obj, str), wrap_error_text("""object preceding parameters access (i.e. .parameters) - in dependency resolution became None due to internal error.""") + if not isinstance(obj, str): + raise TypeError( + "object preceding parameters access (i.e. .parameters) " + + "in dependency resolution became None due to internal error." + ) sorted_dependencies = self.convert_notation_to_dependency_info(obj[1:], dynamic, intermediate) for p in src.parameters: sorted_dependencies += src.parameters.event_resolver.convert_notation_to_dependency_info( @@ -1144,18 +1143,13 @@ def convert_notation_to_dependency_info( return SortedDependencies() elif isinstance(attr_obj, FunctionType): raise NotImplementedError( - wrap_error_text( - f"""In this version of param, support for dependency on other callbacks is removed. - Please divide your methods with your own logic. - """ - ) + f"In this version of param, support for dependency on other callbacks is removed." + + " Please divide your methods with your own logic." 
) else: raise AttributeError( - wrap_error_text( - f"""Attribute {attr!r} could not be resolved on {src} or resolved attribute not supported - for dependent events""" - ) + f"Attribute {attr!r} could not be resolved on {src} or resolved attribute not supported " + + "for dependent events" ) else: raise AttributeError(f"Attribute {attr!r} could not be resolved on {src}.") @@ -1169,14 +1163,17 @@ def parse_notation(cls, notation: str) -> typing.Tuple[typing.Union[str, None], 2. The attribute being depended on, i.e. either a parameter or method 3. The parameter attribute being depended on """ - assert notation.count(":") <= 1, "argument '{notation}' for depends has more than one colon" + if not notation.count(":") <= 1: + raise ValueError(f"argument '{notation}' for depends has more than one colon") notation = notation.strip() m = re.match(r"(?P[^:]*):?(?P.*)", notation) - assert m is not None, f"could not parse object notation for finding dependecies {notation}" + if m is None: + raise ValueError(f"could not parse object notation for finding dependecies {notation}") what = m.group("what") path = "." + m.group("path") m = re.match(r"(?P.*)(\.)(?P.*)", path) - assert m is not None, f"could not parse object notation for finding dependecies {notation}" + if m is None: + raise ValueError(f"could not parse object notation for finding dependecies {notation}") obj = m.group("obj") attr = m.group("attr") return obj or None, attr, what or "value" @@ -1537,9 +1534,9 @@ def execute_watcher(self, watcher: Watcher, events: typing.Tuple[Event]): if iscoroutinefunction(watcher.fn): if async_executor is None: raise RuntimeError( - wrap_error_text(f"""Could not execute {watcher.fn} coroutine function. Please - register a asynchronous executor on param.parameterized.async_executor, which - schedules the function on an event loop.""") + f"Could not execute {watcher.fn} coroutine function. Please " + + "register a asynchronous executor on param.parameterized.async_executor, which " + + "schedules the function on an event loop." ) async_executor(partial(watcher.fn, *args, **kwargs)) else: @@ -1553,7 +1550,7 @@ def trigger(self, *parameters: str) -> None: changed for a Parameter of type Event, setting it to True so that it is clear which Event parameter has been triggered. """ - raise NotImplementedError(wrap_error_text("""Triggering of events is not supported due to incomplete logic.""")) + raise NotImplementedError("Triggering of events is not supported due to incomplete logic.") trigger_params = [ p for p in self_.self_or_cls.param if hasattr(self_.self_or_cls.param[p], "_autotrigger_value") ] @@ -2210,7 +2207,6 @@ def descendents(class_: type) -> typing.List[type]: The list is ordered from least- to most-specific. Can be useful for printing the contents of an entire class hierarchy. """ - assert isinstance(class_, type) q = [class_] out = [] while len(q): diff --git a/hololinked/param/parameters.py b/hololinked/param/parameters.py index 61a8df03..f457e487 100644 --- a/hololinked/param/parameters.py +++ b/hololinked/param/parameters.py @@ -228,7 +228,7 @@ class IPAddress(Parameter): def __init__( self, - default: typing.Optional[str] = "0.0.0.0", + default: typing.Optional[str] = "127.0.0.1", *, allow_ipv4: bool = True, allow_ipv6: bool = True, @@ -560,7 +560,6 @@ def _crop_to_bounds(self, value: typing.Union[int, float]) -> typing.Union[int, """ # Values outside the bounds are silently cropped to # be inside the bounds. 
- assert self.bounds is not None, "Cannot crop to bounds when bounds is None" vmin, vmax = self.bounds incmin, incmax = self.inclusive_bounds if vmin is not None: @@ -2331,10 +2330,8 @@ def _validate_item(self, value: typing.Any): def _validate_bounds_for_set(self, value: typing.Any) -> None: if not (value.__len__() >= self.bounds[0] and value.__len__() <= self.bounds[1]): raise ValueError( - wrap_error_text( - f"""given list {get_iterable_printfriendly_repr(value)} has length out of bounds {self.bounds}. - given length : {value.__len__()}""" - ) + f"given list {get_iterable_printfriendly_repr(value)} has length out of bounds {self.bounds}. " + + f"given length : {value.__len__()}" ) def _validate_bounds_for_extension(self, value: typing.Any = [None]) -> None: @@ -2343,10 +2340,8 @@ def _validate_bounds_for_extension(self, value: typing.Any = [None]) -> None: and self._inner.__len__() + value.__len__() <= self.bounds[1] ): raise ValueError( - wrap_error_text( - f"""given list for extending {get_iterable_printfriendly_repr(value)} extends existing list longer - than bounds {self.bounds}. given length : {self._inner.__len__() + value.__len__()}""" - ) + f"given list for extending {get_iterable_printfriendly_repr(value)} extends existing list longer " + + f"than bounds {self.bounds}. given length : {self._inner.__len__() + value.__len__()}" ) def __len__(self) -> int: @@ -2478,17 +2473,15 @@ def _validate_items(self, value: typing.Any) -> None: for val in value: if not isinstance(val, self.item_type): raise TypeError( - wrap_error_text(f""" - Not all elements of list {get_iterable_printfriendly_repr(value)} given are of allowed item type(s), - which are : {self.item_type}. Given type {type(val)}. Cannot set or extend typed list.""") + f"Not all elements of list {get_iterable_printfriendly_repr(value)} given are of allowed item type(s), " + + f"which are : {self.item_type}. Given type {type(val)}. Cannot set or extend typed list." ) def _validate_item(self, value: typing.Any): if self.item_type is not None and not isinstance(value, self.item_type): raise TypeError( - wrap_error_text(f""" - Not all elements given are of allowed item type(s), which are : {self.item_type}. - Given type {type(value)}. Cannot append or insert in typed list.""") + f"Not all elements given are of allowed item type(s), which are : {self.item_type}. " + + f"Given type {type(value)}. Cannot append or insert in typed list." ) def __iadd__(self, value: typing.List[typing.Any]): @@ -2626,23 +2619,20 @@ def _validate_for_insertion(self, value: typing.Dict) -> None: def _validate_value(self, value) -> None: if not isinstance(value, dict): raise TypeError( - wrap_error_text(f""" - Given value for typed dictionary is not a dictionary. Given type : {type(value)}. Expected dictionary.""") + f"Given value for typed dictionary is not a dictionary. Given type : {type(value)}. Expected dictionary." ) def _validate_bounds_for_set(self, value: typing.Dict) -> None: if not (self.bounds[0] <= value.__len__() <= self.bounds[1]): raise ValueError( - wrap_error_text(f""" - Given dictionary length outside bounds. Given length {value.__len__()}, expected length : {self.bounds}""") + f"Given dictionary length outside bounds. Given length {value.__len__()}, expected length : {self.bounds}" ) def _validate_bounds_for_extension(self, value: typing.Dict = {"dummy": "dummy"}) -> None: if not (self.bounds[0] <= self._inner.__len__() + value.__len__() <= self.bounds[1]): raise ValueError( - wrap_error_text(f""" - Extending dictionary crosses bounds. 
Existing length {self._inner.__len__()}, - length of items to be added : {value.__len__()}, allowed bounds : {self.bounds}""") + f"Extending dictionary crosses bounds. Existing length {self._inner.__len__()}, " + + f"length of items to be added : {value.__len__()}, allowed bounds : {self.bounds}" ) def _validate_items(self, value: typing.Dict[typing.Any, typing.Any]) -> None: @@ -2652,17 +2642,15 @@ def _validate_items(self, value: typing.Dict[typing.Any, typing.Any]) -> None: for key in keys: if not isinstance(key, self.key_type): raise TypeError( - wrap_error_text(f""" - Keys for typed dictionary contain incompatible types. - Allowed types : {self.key_type}, given type : {type(key)}""") + f"Keys for typed dictionary contain incompatible types. " + + f"Allowed types : {self.key_type}, given type : {type(key)}" ) if self.item_type is not None and len(values) != 0: for value in values: if not isinstance(value, self.item_type): raise TypeError( - wrap_error_text(f""" - Values for typed dictionary contain incompatible types. - Allowed types : {self.item_type}. given type : {type(value)}""") + f"Values for typed dictionary contain incompatible types. " + + f"Allowed types : {self.item_type}. given type : {type(value)}" ) def _validate_key_value_pair(self, __key: typing.Any, __value: typing.Any) -> None: @@ -2796,8 +2784,7 @@ def _validate_key_value_pair(self, __key: typing.Any, __value: typing.Any) -> No raise KeyError(f"Keys except {self.key_list} not allowed for typed dictionary. Given key : {__key}.") elif not isinstance(__value, self.type_mapping[__key]): raise TypeError( - wrap_error_text(f""" - Value for key {__key} not of expected type : {self.type_mapping[__key]}. Given type : {type(__value)}.""") + f"Value for key {__key} not of expected type : {self.type_mapping[__key]}. Given type : {type(__value)}." ) def copy(self, return_as_typed: bool = False) -> typing.Union["TypedKeyMappingsConstrainedDict", typing.Dict]: diff --git a/hololinked/param/version.py b/hololinked/param/version.py deleted file mode 100644 index 0981cc1a..00000000 --- a/hololinked/param/version.py +++ /dev/null @@ -1,767 +0,0 @@ -""" -Provide consistent and up-to-date ``__version__`` strings for -Python packages. - -See https://github.com/holoviz/autover for more information. -""" - -# The Version class is a copy of autover.version.Version v0.2.5, -# except as noted below. -# -# The current version of autover supports a workflow based on tagging -# a git repository, and reports PEP440 compliant version information. -# Previously, the workflow required editing of version numbers in -# source code, and the version was not necessarily PEP440 compliant. -# Version.__new__ is added here to provide the previous Version class -# (OldDeprecatedVersion) if Version is called in the old way. - -__author__ = "Jean-Luc Stevens" - -import os, subprocess, json - - -def run_cmd(args, cwd=None): - proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd) - output, error = (str(s.decode()).strip() for s in proc.communicate()) - - # Detects errors as _either_ a non-zero return code _or_ messages - # printed to stderr, because the return code is erroneously fixed at - # zero in some cases (see https://github.com/holoviz/param/pull/389). 
- if proc.returncode != 0 or len(error) > 0: - raise Exception(proc.returncode, error) - return output - - -class Version(object): - """ - A simple approach to Python package versioning that supports PyPI - releases and additional information when working with version - control. When obtaining a package from PyPI, the version returned - is a string-formatted rendering of the supplied release tuple. - For instance, release (1,0) tagged as ``v1.0`` in the version - control system will return ``1.0`` for ``str(__version__)``. Any - number of items can be supplied in the release tuple, with either - two or three numeric versioning levels typical. - - During development, a command like ``git describe`` will be used to - compute the number of commits since the last version tag, the short - commit hash, and whether the commit is dirty (has changes not yet - committed). Version tags must start with a lowercase 'v' and have a - period in them, e.g. v2.0, v0.9.8 or v0.1 and may include the PEP440 - prerelease identifiers of 'a' (alpha) 'b' (beta) or 'rc' (release - candidate) allowing tags such as v2.0.a3, v0.9.8.b3 or v0.1.rc5. - - Also note that when version control system (VCS) information is - used, the number of commits since the last version tag is - determined. This approach is often useful in practice to decide - which version is newer for a single developer, but will not - necessarily be reliable when comparing against a different fork or - branch in a distributed VCS. - - For git, if you want version control information available even in - an exported archive (e.g. a .zip file from GitHub), you can set - the following line in the .gitattributes file of your project:: - - __init__.py export-subst - - Note that to support pip installation directly from GitHub via git - archive, a .version file must be tracked by the repo to supply the - release number (otherwise only the short SHA is available). - - The PEP440 format returned is [N!]N(.N)*[{a|b|rc}N][.postN+SHA] - where everything before .postN is obtained from the tag, the N in - .postN is the number of commits since the last tag and the SHA is - obtained via git describe. This later portion is only shown if the - commit count since the last tag is non zero. Instead of '.post', an - alternate valid prefix such as '.rev', '_rev', '_r' or '.r' may be - supplied.""" - - def __new__(cls, **kw): - # If called in the old way, provide the previous class. Means - # PEP440/tag based workflow warning below will never appear. - if ( - ("release" in kw and kw["release"] is not None) - or ("dev" in kw and kw["dev"] is not None) - or ("commit_count" in kw) - ): - return OldDeprecatedVersion(**kw) - else: - return super(Version, cls).__new__(cls) - - def __init__( - self, - release=None, - fpath=None, - commit=None, - reponame=None, - commit_count_prefix=".post", - archive_commit=None, - **kwargs, - ): - """ - :release: Release tuple (corresponding to the current VCS tag) - :commit Short SHA. Set to '$Format:%h$' for git archive support. - :fpath: Set to ``__file__`` to access version control information - :reponame: Used to verify VCS repository name. - """ - self.fpath = fpath - self._expected_commit = commit - - if release is not None or "commit_count" in kwargs: - print( - "WARNING: param.Version now supports PEP440 and a new tag based workflow. 
See param/version.py for more details" - ) - - self.expected_release = release - - self._commit = None if (commit is None or commit.startswith("$Format")) else commit - self._commit_count = None - self._release = None - self._dirty = False - self._prerelease = None - - self.archive_commit = archive_commit - - self.reponame = reponame - self.commit_count_prefix = commit_count_prefix - - @property - def prerelease(self): - """ - Either None or one of 'aN' (alpha), 'bN' (beta) or 'rcN' - (release candidate) where N is an integer. - """ - return self.fetch()._prerelease - - @property - def release(self): - "Return the release tuple" - return self.fetch()._release - - @property - def commit(self): - "A specification for this particular VCS version, e.g. a short git SHA" - return self.fetch()._commit - - @property - def commit_count(self): - "Return the number of commits since the last release" - return self.fetch()._commit_count - - @property - def dirty(self): - "True if there are uncommited changes, False otherwise" - return self.fetch()._dirty - - def fetch(self): - """ - Returns a tuple of the major version together with the - appropriate SHA and dirty bit (for development version only). - """ - if self._release is not None: - return self - - self._release = self.expected_release - if not self.fpath: - self._commit = self._expected_commit - return self - - # Only git right now but easily extended to SVN, Mercurial, etc. - for cmd in ["git", "git.cmd", "git.exe"]: - try: - self.git_fetch(cmd) - break - except EnvironmentError: - pass - return self - - def git_fetch(self, cmd="git", as_string=False): - commit_argument = self._commit - output = None - try: - if self.reponame is not None: - # Verify this is the correct repository (since fpath could - # be an unrelated git repository, and autover could just have - # been copied/installed into it). - remotes = run_cmd([cmd, "remote", "-v"], cwd=os.path.dirname(self.fpath)) - repo_matches = [ - "/" + self.reponame + ".git", - # A remote 'server:reponame.git' can also be referred - # to (i.e. cloned) as `server:reponame`. - "/" + self.reponame + " ", - ] - if not any(m in remotes for m in repo_matches): - try: - output = self._output_from_file() - if output is not None: - self._update_from_vcs(output) - except: - pass - if output is None: - # glob pattern (not regexp) matching vX.Y.Z* tags - output = run_cmd( - [cmd, "describe", "--long", "--match", "v[0-9]*.[0-9]*.[0-9]*", "--dirty"], - cwd=os.path.dirname(self.fpath), - ) - if as_string: - return output - except Exception as e1: - try: - output = self._output_from_file() - if output is not None: - self._update_from_vcs(output) - if self._known_stale(): - self._commit_count = None - if as_string: - return output - - # If an explicit commit was supplied (e.g from git - # archive), it should take precedence over the file. - if commit_argument: - self._commit = commit_argument - return - - except IOError: - if e1.args[1] == "fatal: No names found, cannot describe anything.": - raise Exception("Cannot find any git version tags of format v*.*") - # If there is any other error, return (release value still useful) - return self - - self._update_from_vcs(output) - - def _known_stale(self): - """ - The commit is known to be from a file (and therefore stale) if a - SHA is supplied by git archive and doesn't match the parsed commit. 
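As an aside on the git_fetch() being removed above: it shells out to git describe with a version-tag glob. A minimal, self-contained sketch of that call follows; the repository is assumed to be the current working directory and the sample outputs are illustrative, not taken from this diff.

import subprocess

result = subprocess.run(
    ["git", "describe", "--long", "--match", "v[0-9]*.[0-9]*.[0-9]*", "--dirty"],
    capture_output=True, text=True, check=True,  # raises if no matching tag exists
)
# Typical outputs: "v1.2.3-0-gabc1234" exactly at a tag,
# "v1.2.3-4-gabc1234-dirty" four commits past the tag with uncommitted changes.
print(result.stdout.strip())
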
- """ - if self._output_from_file() is None: - commit = None - else: - commit = self.commit - - known_stale = ( - self.archive_commit is not None - and not self.archive_commit.startswith("$Format") - and self.archive_commit != commit - ) - if known_stale: - self._commit_count = None - return known_stale - - def _output_from_file(self, entry="git_describe"): - """ - Read the version from a .version file that may exist alongside __init__.py. - - This file can be generated by piping the following output to file: - - git describe --long --match v*.* - """ - try: - vfile = os.path.join(os.path.dirname(self.fpath), ".version") - with open(vfile, "r") as f: - return json.loads(f.read()).get(entry, None) - except: # File may be missing if using pip + git archive - return None - - def _update_from_vcs(self, output): - "Update state based on the VCS state e.g the output of git describe" - split = output[1:].split("-") - dot_split = split[0].split(".") - for prefix in ["a", "b", "rc"]: - if prefix in dot_split[-1]: - prefix_split = dot_split[-1].split(prefix) - self._prerelease = prefix + prefix_split[-1] - dot_split[-1] = prefix_split[0] - - self._release = tuple(int(el) for el in dot_split) - self._commit_count = int(split[1]) - - self._commit = str(split[2][1:]) # Strip out 'g' prefix ('g'=>'git') - - self._dirty = split[-1] == "dirty" - return self - - def __str__(self): - """ - Version in x.y.z string format. Does not include the "v" - prefix of the VCS version tags, for pip compatibility. - - If the commit count is non-zero or the repository is dirty, - the string representation is equivalent to the output of:: - - git describe --long --match v*.* --dirty - - (with "v" prefix removed). - """ - known_stale = self._known_stale() - if self.release is None and not known_stale: - extracted_directory_tag = self._output_from_file(entry="extracted_directory_tag") - return "None" if extracted_directory_tag is None else extracted_directory_tag - elif self.release is None and known_stale: - extracted_directory_tag = self._output_from_file(entry="extracted_directory_tag") - if extracted_directory_tag is not None: - return extracted_directory_tag - return "0.0.0+g{SHA}-gitarchive".format(SHA=self.archive_commit) - - release = ".".join(str(el) for el in self.release) - prerelease = "" if self.prerelease is None else self.prerelease - - if self.commit_count == 0 and not self.dirty: - return release + prerelease - - commit = self.commit - dirty = "-dirty" if self.dirty else "" - archive_commit = "" - if known_stale: - archive_commit = "-gitarchive" - commit = self.archive_commit - - if archive_commit != "": - postcount = self.commit_count_prefix + "0" - elif self.commit_count not in [0, None]: - postcount = self.commit_count_prefix + str(self.commit_count) - else: - postcount = "" - - components = [release, prerelease, postcount, "" if commit is None else "+g" + commit, dirty, archive_commit] - return "".join(components) - - def __repr__(self): - return str(self) - - def abbrev(self): - """ - Abbreviated string representation of just the release number. - """ - return ".".join(str(el) for el in self.release) - - def verify(self, string_version=None): - """ - Check that the version information is consistent with the VCS - before doing a release. If supplied with a string version, - this is also checked against the current version. Should be - called from setup.py with the declared package version before - releasing to PyPI. 
- """ - if string_version and string_version != str(self): - raise Exception("Supplied string version does not match current version.") - - if self.dirty: - raise Exception("Current working directory is dirty.") - - if self.expected_release is not None and self.release != self.expected_release: - raise Exception("Declared release does not match current release tag.") - - if self.commit_count != 0: - raise Exception("Please update the VCS version tag before release.") - - if self._expected_commit is not None and not self._expected_commit.startswith("$Format"): - raise Exception("Declared release does not match the VCS version tag") - - @classmethod - def get_setup_version(cls, setup_path, reponame, describe=False, dirty="report", pkgname=None, archive_commit=None): - """ - Helper for use in setup.py to get the version from the .version file (if available) - or more up-to-date information from git describe (if available). - - Assumes the __init__.py will be found in the directory - {reponame}/__init__.py relative to setup.py unless pkgname is - explicitly specified in which case that name is used instead. - - If describe is True, the raw string obtained from git described is - returned which is useful for updating the .version file. - - The dirty policy can be one of 'report', 'strip', 'raise'. If it is - 'report' the version string may end in '-dirty' if the repository is - in a dirty state. If the policy is 'strip', the '-dirty' suffix - will be stripped out if present. If the policy is 'raise', an - exception is raised if the repository is in a dirty state. This can - be useful if you want to make sure packages are not built from a - dirty repository state. - """ - pkgname = reponame if pkgname is None else pkgname - policies = ["raise", "report", "strip"] - if dirty not in policies: - raise AssertionError("get_setup_version dirty policy must be in %r" % policies) - - fpath = os.path.join(setup_path, pkgname, "__init__.py") - version = Version(fpath=fpath, reponame=reponame, archive_commit=archive_commit) - if describe: - vstring = version.git_fetch(as_string=True) - else: - vstring = str(version) - - if version.dirty and dirty == "raise": - raise AssertionError("Repository is in a dirty state.") - elif version.dirty and dirty == "strip": - return vstring.replace("-dirty", "") - else: - return vstring - - @classmethod - def extract_directory_tag(cls, setup_path, reponame): - setup_dir = os.path.split(setup_path)[-1] # Directory containing setup.py - prefix = reponame + "-" # Prefix to match - if setup_dir.startswith(prefix): - tag = setup_dir[len(prefix) :] - # Assuming the tag is a version if it isn't empty, 'master' and has a dot in it - if tag not in ["", "master"] and ("." 
in tag): - return tag - return None - - @classmethod - def setup_version(cls, setup_path, reponame, archive_commit=None, pkgname=None, dirty="report"): - info = {} - git_describe = None - pkgname = reponame if pkgname is None else pkgname - try: - # Will only work if in a git repo and git is available - git_describe = Version.get_setup_version( - setup_path, reponame, describe=True, dirty=dirty, pkgname=pkgname, archive_commit=archive_commit - ) - - if git_describe is not None: - info["git_describe"] = git_describe - except: - pass - - if git_describe is None: - extracted_directory_tag = Version.extract_directory_tag(setup_path, reponame) - if extracted_directory_tag is not None: - info["extracted_directory_tag"] = extracted_directory_tag - try: - with open(os.path.join(setup_path, pkgname, ".version"), "w") as f: - f.write(json.dumps({"extracted_directory_tag": extracted_directory_tag})) - except: - print("Error in setup_version: could not write .version file.") - - info["version_string"] = Version.get_setup_version( - setup_path, reponame, describe=False, dirty=dirty, pkgname=pkgname, archive_commit=archive_commit - ) - try: - with open(os.path.join(setup_path, pkgname, ".version"), "w") as f: - f.write(json.dumps(info)) - except: - print("Error in setup_version: could not write .version file.") - - return info["version_string"] - - -def get_setup_version(location, reponame, pkgname=None, archive_commit=None): - """Helper for use in setup.py to get the current version from either - git describe or the .version file (if available). - - Set pkgname to the package name if it is different from the - repository name. - - To ensure git information is included in a git archive, add - setup.py to .gitattributes (in addition to __init__): - ``` - __init__.py export-subst - setup.py export-subst - ``` - Then supply "$Format:%h$" for archive_commit. - - """ - import warnings - - pkgname = reponame if pkgname is None else pkgname - if archive_commit is None: - warnings.warn("No archive commit available; git archives will not contain version information") - return Version.setup_version( - os.path.dirname(os.path.abspath(location)), reponame, pkgname=pkgname, archive_commit=archive_commit - ) - - -def get_setupcfg_version(): - """As get_setup_version(), but configure via setup.cfg. - - If your project uses setup.cfg to configure setuptools, and hence has - at least a "name" key in the [metadata] section, you can - set the version as follows: - ``` - [metadata] - name = mypackage - version = attr: autover.version.get_setup_version2 - ``` - - If the repository name is different from the package name, specify - `reponame` as a [tool:autover] option: - ``` - [tool:autover] - reponame = mypackage - ``` - - To ensure git information is included in a git archive, add - setup.cfg to .gitattributes (in addition to __init__): - ``` - __init__.py export-subst - setup.cfg export-subst - ``` - - Then add the following to setup.cfg: - ``` - [tool:autover.configparser_workaround.archive_commit=$Format:%h$] - ``` - - The above being a section heading rather than just a key is - because setuptools requires % to be escaped with %, or it can't - parse setup.cfg...but then git export-subst would not work. 
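The setup_version() helper above persists its findings in a small JSON ".version" file that _output_from_file() reads back when git is unavailable. A hedged sketch of that round trip; the directory and values below are illustrative.

import json, os, tempfile

pkg_dir = tempfile.mkdtemp()
with open(os.path.join(pkg_dir, ".version"), "w") as f:
    # keys mirror the info dict written by setup_version() above
    json.dump({"git_describe": "v1.2.3-4-gabc1234", "version_string": "1.2.3.post4+gabc1234"}, f)

with open(os.path.join(pkg_dir, ".version")) as f:
    print(json.load(f).get("git_describe"))  # -> v1.2.3-4-gabc1234
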
- - """ - try: - import configparser - except ImportError: - import ConfigParser as configparser # python2 (also prevents dict-like access) - import re - - cfg = "setup.cfg" - autover_section = "tool:autover" - config = configparser.ConfigParser() - config.read(cfg) - pkgname = config.get("metadata", "name") - reponame = ( - config.get(autover_section, "reponame", vars={"reponame": pkgname}) - if autover_section in config.sections() - else pkgname - ) - - ### - # hack archive_commit into section heading; see docstring - archive_commit = None - archive_commit_key = autover_section + ".configparser_workaround.archive_commit" - for section in config.sections(): - if section.startswith(archive_commit_key): - archive_commit = re.match(r".*=\s*(\S*)\s*", section).group(1) - ### - return get_setup_version(cfg, reponame=reponame, pkgname=pkgname, archive_commit=archive_commit) - - -# from param/version.py aa087db29976d9b7e0f59c29789dfd721c85afd0 -class OldDeprecatedVersion(object): - """ - A simple approach to Python package versioning that supports PyPI - releases and additional information when working with version - control. When obtaining a package from PyPI, the version returned - is a string-formatted rendering of the supplied release tuple. - For instance, release (1,0) tagged as ``v1.0`` in the version - control system will return ``1.0`` for ``str(__version__)``. Any - number of items can be supplied in the release tuple, with either - two or three numeric versioning levels typical. - - During development, a command like ``git describe`` will be used to - compute the number of commits since the last version tag, the - short commit hash, and whether the commit is dirty (has changes - not yet committed). Version tags must start with a lowercase 'v' - and have a period in them, e.g. v2.0, v0.9.8 or v0.1. - - Development versions are supported by setting the dev argument to an - appropriate dev version number. The corresponding tag can be PEP440 - compliant (using .devX) of the form v0.1.dev3, v1.9.0.dev2 etc but - it doesn't have to be as the dot may be omitted i.e v0.1dev3, - v1.9.0dev2 etc. - - Also note that when version control system (VCS) information is - used, the comparison operators take into account the number of - commits since the last version tag. This approach is often useful - in practice to decide which version is newer for a single - developer, but will not necessarily be reliable when comparing - against a different fork or branch in a distributed VCS. - - For git, if you want version control information available even in - an exported archive (e.g. a .zip file from GitHub), you can set - the following line in the .gitattributes file of your project:: - - __init__.py export-subst - """ - - def __init__(self, release=None, fpath=None, commit=None, reponame=None, dev=None, commit_count=0): - """ - :release: Release tuple (corresponding to the current VCS tag) - :commit Short SHA. Set to '$Format:%h$' for git archive support. - :fpath: Set to ``__file__`` to access version control information - :reponame: Used to verify VCS repository name. - :dev: Development version number. None if not a development version. - :commit_count Commits since last release. Set for dev releases. 
- """ - self.fpath = fpath - self._expected_commit = commit - self.expected_release = release - - self._commit = None if commit in [None, "$Format:%h$"] else commit - self._commit_count = commit_count - self._release = None - self._dirty = False - self.reponame = reponame - self.dev = dev - - @property - def release(self): - "Return the release tuple" - return self.fetch()._release - - @property - def commit(self): - "A specification for this particular VCS version, e.g. a short git SHA" - return self.fetch()._commit - - @property - def commit_count(self): - "Return the number of commits since the last release" - return self.fetch()._commit_count - - @property - def dirty(self): - "True if there are uncommited changes, False otherwise" - return self.fetch()._dirty - - def fetch(self): - """ - Returns a tuple of the major version together with the - appropriate SHA and dirty bit (for development version only). - """ - if self._release is not None: - return self - - self._release = self.expected_release - if not self.fpath: - self._commit = self._expected_commit - return self - - # Only git right now but easily extended to SVN, Mercurial, etc. - for cmd in ["git", "git.cmd", "git.exe"]: - try: - self.git_fetch(cmd) - break - except EnvironmentError: - pass - return self - - def git_fetch(self, cmd="git"): - try: - if self.reponame is not None: - # Verify this is the correct repository (since fpath could - # be an unrelated git repository, and param could just have - # been copied/installed into it). - output = run_cmd([cmd, "remote", "-v"], cwd=os.path.dirname(self.fpath)) - repo_matches = [ - "/" + self.reponame + ".git", - # A remote 'server:reponame.git' can also be referred - # to (i.e. cloned) as `server:reponame`. - "/" + self.reponame + " ", - ] - if not any(m in output for m in repo_matches): - return self - - output = run_cmd([cmd, "describe", "--long", "--match", "v*.*", "--dirty"], cwd=os.path.dirname(self.fpath)) - except Exception as e: - if e.args[1] == "fatal: No names found, cannot describe anything.": - raise Exception("Cannot find any git version tags of format v*.*") - # If there is any other error, return (release value still useful) - return self - - self._update_from_vcs(output) - - def _update_from_vcs(self, output): - "Update state based on the VCS state e.g the output of git describe" - split = output[1:].split("-") - if "dev" in split[0]: - dev_split = split[0].split("dev") - self.dev = int(dev_split[1]) - split[0] = dev_split[0] - # Remove the pep440 dot if present - if split[0].endswith("."): - split[0] = dev_split[0][:-1] - - self._release = tuple(int(el) for el in split[0].split(".")) - self._commit_count = int(split[1]) - self._commit = str(split[2][1:]) # Strip out 'g' prefix ('g'=>'git') - self._dirty = split[-1] == "dirty" - return self - - def __str__(self): - """ - Version in x.y.z string format. Does not include the "v" - prefix of the VCS version tags, for pip compatibility. - - If the commit count is non-zero or the repository is dirty, - the string representation is equivalent to the output of:: - - git describe --long --match v*.* --dirty - - (with "v" prefix removed). 
- """ - if self.release is None: - return "None" - release = ".".join(str(el) for el in self.release) - release = "%s.dev%d" % (release, self.dev) if self.dev is not None else release - - if (self._expected_commit is not None) and ("$Format" not in self._expected_commit): - pass # Concrete commit supplied - print full version string - elif self.commit_count == 0 and not self.dirty: - return release - - dirty_status = "-dirty" if self.dirty else "" - return "%s-%s-g%s%s" % (release, self.commit_count if self.commit_count else "x", self.commit, dirty_status) - - def __repr__(self): - return str(self) - - def abbrev(self, dev_suffix=""): - """ - Abbreviated string representation, optionally declaring whether it is - a development version. - """ - return ".".join(str(el) for el in self.release) + (dev_suffix if self.commit_count > 0 or self.dirty else "") - - def __eq__(self, other): - """ - Two versions are considered equivalent if and only if they are - from the same release, with the same commit count, and are not - dirty. Any dirty version is considered different from any - other version, since it could potentially have any arbitrary - changes even for the same release and commit count. - """ - if self.dirty or other.dirty: - return False - return (self.release, self.commit_count, self.dev) == (other.release, other.commit_count, other.dev) - - def __gt__(self, other): - if self.release == other.release: - if self.dev == other.dev: - return self.commit_count > other.commit_count - elif None in [self.dev, other.dev]: - return self.dev is None - else: - return self.dev > other.dev - else: - return (self.release, self.commit_count) > (other.release, other.commit_count) - - def __lt__(self, other): - if self == other: - return False - else: - return not (self > other) - - def verify(self, string_version=None): - """ - Check that the version information is consistent with the VCS - before doing a release. If supplied with a string version, - this is also checked against the current version. Should be - called from setup.py with the declared package version before - releasing to PyPI. - """ - if string_version and string_version != str(self): - raise Exception("Supplied string version does not match current version.") - - if self.dirty: - raise Exception("Current working directory is dirty.") - - if self.release != self.expected_release: - raise Exception("Declared release does not match current release tag.") - - if self.commit_count != 0: - raise Exception("Please update the VCS version tag before release.") - - if self._expected_commit not in [None, "$Format:%h$"]: - raise Exception("Declared release does not match the VCS version tag") diff --git a/hololinked/serializers/serializers.py b/hololinked/serializers/serializers.py index 8fd594f2..ebe71cb6 100644 --- a/hololinked/serializers/serializers.py +++ b/hololinked/serializers/serializers.py @@ -24,21 +24,23 @@ SOFTWARE. 
""" -import inspect import array import datetime -import io -import uuid import decimal +import inspect +import io +import json as pythonjson + +# serializers: +import pickle import typing +import uuid import warnings -from enum import Enum from collections import deque +from enum import Enum -# serializers: -import pickle -import json as pythonjson -from msgspec import json as msgspecjson, msgpack, Struct +from msgspec import Struct, msgpack +from msgspec import json as msgspecjson # default dytypes: try: @@ -46,15 +48,15 @@ except ImportError: pass +from ..constants import JSONSerializable from ..param.parameters import ( - TypeConstrainedList, - TypeConstrainedDict, - TypedKeyMappingsConstrainedDict, ClassSelector, - String, Parameter, + String, + TypeConstrainedDict, + TypeConstrainedList, + TypedKeyMappingsConstrainedDict, ) -from ..constants import JSONSerializable from ..utils import MappableSingleton, format_exception_as_json, issubklass @@ -201,11 +203,19 @@ def __init__(self) -> None: def dumps(self, data) -> bytes: "method called by ZMQ message brokers to serialize data" - return pickle.dumps(data) + from ..config import global_config + + if global_config.ALLOW_PICKLE: + return pickle.dumps(data) + raise RuntimeError("Pickle serialization is not allowed by the global configuration") def loads(self, data) -> typing.Any: "method called by ZMQ message brokers to deserialize data" - return pickle.loads(self.convert_to_bytes(data)) + from ..config import global_config + + if global_config.ALLOW_PICKLE: + return pickle.loads(self.convert_to_bytes(data)) + raise RuntimeError("Pickle deserialization is not allowed by the global configuration") @property def content_type(self) -> str: @@ -476,7 +486,7 @@ def register_for_object(cls, objekt: typing.Any, serializer: BaseSerializer) -> """ if not isinstance(serializer, BaseSerializer): raise ValueError("serializer must be an instance of BaseSerializer, given : {}".format(type(serializer))) - from ..core import Property, Action, Event, Thing + from ..core import Action, Event, Property, Thing if not isinstance(objekt, (Property, Action, Event)) and not issubklass(objekt, Thing): raise ValueError("object must be a Property, Action or Event, or Thing, got : {}".format(type(objekt))) @@ -513,7 +523,7 @@ def register_content_type_for_object(cls, objekt: typing.Any, content_type: str) """ if content_type not in cls.content_types: raise ValueError("content type {} unsupported".format(content_type)) - from ..core import Property, Action, Event, Thing + from ..core import Action, Event, Property, Thing if not isinstance(objekt, (Property, Action, Event)) and not issubklass(objekt, Thing): raise ValueError("object must be a Property, Action or Event, got : {}".format(type(objekt))) @@ -551,7 +561,7 @@ def register_content_type_for_object_per_thing_instance( """ if content_type not in cls.content_types: raise ValueError("content type {} unsupported".format(content_type)) - from ..core import Property, Action, Event + from ..core import Action, Event, Property if not isinstance(objekt, (Property, Action, Event, str)): raise ValueError("object must be a Property, Action or Event, got : {}".format(type(objekt))) diff --git a/hololinked/server/http/__init__.py b/hololinked/server/http/__init__.py index 18bc5164..9c55beb0 100644 --- a/hololinked/server/http/__init__.py +++ b/hololinked/server/http/__init__.py @@ -3,11 +3,9 @@ import ssl import typing import warnings - from copy import deepcopy import structlog - from pydantic import BaseModel from tornado 
import ioloop from tornado.httpserver import HTTPServer as TornadoHTTP1Server @@ -144,7 +142,8 @@ def __init__( port: int, default 8080 the port at which the server should be run address: str, default 0.0.0.0 - IP address + IP address, use 0.0.0.0 to bind to all interfaces to expose the server to other devices in the network + and 127.0.0.1 to bind only to localhost logger: logging.Logger, optional logging.Logger instance log_level: int diff --git a/hololinked/server/http/handlers.py b/hololinked/server/http/handlers.py index afef9d78..4ab5f780 100644 --- a/hololinked/server/http/handlers.py +++ b/hololinked/server/http/handlers.py @@ -3,7 +3,6 @@ import uuid import msgspec - from msgspec import DecodeError as MsgspecJSONDecodeError from tornado.iostream import StreamClosedError from tornado.web import RequestHandler, StaticFileHandler @@ -35,7 +34,6 @@ from ...td.forms import Form from ...utils import format_exception_as_json, get_current_async_loop - try: from ..security import BcryptBasicSecurity except ImportError: @@ -76,12 +74,11 @@ def initialize( metadata: typing.Optional[typing.Dict[str, typing.Any]] additional metadata about the resource, like allowed HTTP methods """ - from . import HTTPServer + from . import HTTPServer # noqa: F401 - assert isinstance(owner_inst, HTTPServer) self.resource = resource self.schema_validator = None # self.server.schema_validator # not supported yet - self.server = owner_inst + self.server = owner_inst # type: HTTPServer self.zmq_client_pool = self.server.zmq_client_pool self.logger = self.server.logger.bind( resource=self.resource.name, @@ -659,10 +656,9 @@ class StopHandler(BaseHandler): """Stops the tornado HTTP server""" def initialize(self, owner_inst=None) -> None: - from . import HTTPServer + from . import HTTPServer # noqa: F401 - assert isinstance(owner_inst, HTTPServer) - self.server = owner_inst + self.server = owner_inst # type: HTTPServer self.allowed_clients = self.server.allowed_clients self.security_schemes = self.server.security_schemes self.logger = self.server.logger.bind(path=self.request.path) @@ -691,10 +687,9 @@ class LivenessProbeHandler(BaseHandler): """Liveness probe handler""" def initialize(self, owner_inst=None) -> None: - from . import HTTPServer + from . import HTTPServer # noqa: F401 - assert isinstance(owner_inst, HTTPServer) - self.server = owner_inst + self.server = owner_inst # type: HTTPServer self.logger = self.server.logger.bind(path=self.request.path) async def get(self): @@ -705,10 +700,9 @@ async def get(self): class ReadinessProbeHandler(BaseHandler): def initialize(self, owner_inst=None) -> None: - from . import HTTPServer + from . 
import HTTPServer # noqa: F401 - assert isinstance(owner_inst, HTTPServer) - self.server = owner_inst + self.server = owner_inst # type: HTTPServer self.logger = self.server.logger.bind(path=self.request.path) async def get(self): diff --git a/hololinked/server/utils.py b/hololinked/server/utils.py index 19dcbf72..e9eb2ef3 100644 --- a/hololinked/server/utils.py +++ b/hololinked/server/utils.py @@ -1,12 +1,13 @@ -from typing import Any, Optional +import logging import uuid +from typing import Any, Optional + import zmq.asyncio -import logging from ..config import global_config from ..constants import Operations -from ..core import Thing, Action -from ..core.zmq import AsyncZMQClient, AsyncEventConsumer +from ..core import Thing +from ..core.zmq import AsyncEventConsumer, AsyncZMQClient from ..td.interaction_affordance import EventAffordance @@ -61,7 +62,7 @@ async def consume_broker_queue( await client.handshake_complete(10000) # fetch ZMQ INPROC TD - assert isinstance(Thing.get_thing_model, Action) # type definition + Thing.get_thing_model # type: Action FetchTMAffordance = Thing.get_thing_model.to_affordance() FetchTMAffordance.override_defaults(thing_id=thing_id, name="get_thing_description") fetch_td = ZMQAction( diff --git a/hololinked/td/data_schema.py b/hololinked/td/data_schema.py index bcb95c15..1bdeeea8 100644 --- a/hololinked/td/data_schema.py +++ b/hololinked/td/data_schema.py @@ -1,29 +1,30 @@ from typing import Any, ClassVar, Optional -from pydantic import BaseModel, Field, ConfigDict, RootModel -from .base import Schema -from .utils import get_summary -from ..utils import issubklass +from pydantic import BaseModel, ConfigDict, Field, RootModel + from ..constants import JSON, JSONSerializable -from ..schema_validators.json_schema import JSONSchema +from ..core import Property from ..core.properties import ( - String, - Number, - Integer, Boolean, + ClassSelector, + Filename, + Foldername, + Integer, List, - TypedList, + Number, + Path, + Selector, + String, Tuple, TupleSelector, - Selector, TypedDict, TypedKeyMappingsDict, - ClassSelector, - Filename, - Foldername, - Path, + TypedList, ) -from ..core import Property +from ..schema_validators.json_schema import JSONSchema +from ..utils import issubklass +from .base import Schema +from .utils import get_summary class DataSchema(Schema): @@ -57,9 +58,6 @@ def __init__(self): def ds_build_fields_from_property(self, property: Property) -> None: """populates schema information from descriptor object""" - assert isinstance(property, Property), ( - f"only Property is a subclass of dataschema, given type: {type(property)}" - ) self.title = get_summary(property.doc) if property.constant: self.const = property.constant @@ -92,12 +90,6 @@ def ds_build_from_property(self, property: Property) -> None: generates the schema specific to the type, calls `ds_build_fields_from_property()` after choosing the right type """ - assert isinstance(property, Property) - - if not isinstance(property, Property): - raise TypeError( - f"Property affordance schema can only be generated for Property. 
Given type {type(property)}" - ) if self._custom_schema_generators.get(property, NotImplemented) is not NotImplemented: data_schema = self._custom_schema_generators[property]() elif isinstance(property, Property) and property.model is not None: @@ -436,9 +428,8 @@ def __init__(self): def ds_build_fields_from_property(self, property) -> None: """generates the schema""" - assert isinstance(property, Selector), ( - f"EnumSchema compatible property is only Selector, not {property.__class__}" - ) + if not isinstance(property, Selector): + raise TypeError(f"EnumSchema compatible property is only Selector, not {property.__class__}") self.enum = list(property.objects) super().ds_build_fields_from_property(property) diff --git a/hololinked/td/interaction_affordance.py b/hololinked/td/interaction_affordance.py index 46058a6f..d8f2c211 100644 --- a/hololinked/td/interaction_affordance.py +++ b/hololinked/td/interaction_affordance.py @@ -1,21 +1,21 @@ -import typing import copy +import typing from enum import Enum from typing import ClassVar, Optional -from pydantic import ConfigDict -from pydantic import BaseModel, RootModel -from .base import Schema -from .data_schema import DataSchema -from .forms import Form -from .utils import get_summary -from ..utils import issubklass +from pydantic import BaseModel, ConfigDict, RootModel + from ..constants import JSON, ResourceTypes -from ..core.property import Property from ..core.actions import Action from ..core.events import Event +from ..core.property import Property from ..core.thing import Thing, ThingMeta +from ..utils import issubklass +from .base import Schema +from .data_schema import DataSchema +from .forms import Form from .pydantic_extensions import type_to_dataschema +from .utils import get_summary class InteractionAffordance(Schema): @@ -340,7 +340,8 @@ def build(self) -> None: @classmethod def generate(cls, property, owner=None): - assert isinstance(property, Property), f"property must be instance of Property, given type {type(property)}" + if not isinstance(property, Property): + raise TypeError(f"property must be instance of Property, given type {type(property)}") affordance = PropertyAffordance() affordance.owner = owner affordance.objekt = property @@ -372,8 +373,7 @@ def what(self): return ResourceTypes.ACTION def build(self) -> None: - action = self.objekt - assert isinstance(action, Action) # type definition + action = self.objekt # type: Action if action.obj.__doc__: title = get_summary(action.obj.__doc__) description = self.format_doc(action.obj.__doc__) @@ -416,6 +416,8 @@ def build(self) -> None: @classmethod def generate(cls, action: Action, owner, **kwargs) -> "ActionAffordance": + if not isinstance(action, Action): + raise TypeError(f"action must be instance of Action, given type {type(action)}") affordance = ActionAffordance() affordance.owner = owner affordance.objekt = action @@ -444,8 +446,7 @@ def what(self): return ResourceTypes.EVENT def build(self) -> None: - event = self.objekt - assert isinstance(event, Event) # type definition + event = self.objekt # type: Event if event.__doc__: title = get_summary(event.doc) description = self.format_doc(event.doc) @@ -464,6 +465,8 @@ def build(self) -> None: @classmethod def generate(cls, event: Event, owner, **kwargs) -> "EventAffordance": + if not isinstance(event, Event): + raise TypeError(f"event must be instance of Event, given type {type(event)}") affordance = EventAffordance() affordance.owner = owner affordance.objekt = event diff --git a/pyproject.toml b/pyproject.toml 
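Before the packaging changes: the handler, data-schema and affordance hunks above consistently replace assert isinstance(...) with explicit raises. A minimal illustration of why that matters (asserts are stripped under "python -O", explicit raises are not); the function names below are made up and only echo the message style used in those hunks.

def generate_with_assert(obj):
    assert isinstance(obj, int), "obj must be an int"   # silently skipped under "python -O"
    return obj

def generate_with_raise(obj):
    if not isinstance(obj, int):
        raise TypeError(f"obj must be instance of int, given type {type(obj)}")  # always enforced
    return obj
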
index 8cc0bcdb..eb0c6297 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,6 +90,9 @@ test = [ "pytest-cov>=4.0.0", "pytest-order>=1.0.0", "pytest-asyncio>=1.3.0", +] +scanning = [ + "bandit>=1.9.1", ] linux = [ "uvloop==0.20.0" @@ -121,4 +124,17 @@ filterwarnings = [ exclude = [ "hololinked/core/properties.py", "hololinked/param" +] + +[tool.bandit] +exclude_dirs = [ + ".venv", + ".ruff_cache", + "__pycache__", + ".vscode", + ".github", + "doc", + "examples", + "licenses", + "tests" ] \ No newline at end of file diff --git a/uv.lock b/uv.lock index 977ce71a..a5d884aa 100644 --- a/uv.lock +++ b/uv.lock @@ -152,6 +152,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4c/1c/ff6546b6c12603d8dd1070aa3c3d273ad4c07f5771689a7b69a550e8c951/backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255", size = 11157 }, ] +[[package]] +name = "bandit" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "stevedore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/d5/82fc87a82ad9536215c1b5693bbb675439f6f2d0c2fca74b2df2cb9db925/bandit-1.9.1.tar.gz", hash = "sha256:6dbafd1a51e276e065404f06980d624bad142344daeac3b085121fcfd117b7cf", size = 4241552 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/82/249a7710242b7a05f7f4245a0da3cdd4042e4377f5d00059619fa2b941f3/bandit-1.9.1-py3-none-any.whl", hash = "sha256:0a1f34c04f067ee28985b7854edaa659c9299bd71e1b7e18236e46cccc79720b", size = 134216 }, +] + [[package]] name = "bcrypt" version = "4.3.0" @@ -627,6 +642,7 @@ source = { editable = "." } dependencies = [ { name = "aiomqtt" }, { name = "argon2-cffi" }, + { name = "bandit" }, { name = "httpx" }, { name = "ifaddr" }, { name = "jsonschema" }, @@ -659,6 +675,9 @@ dev = [ linux = [ { name = "uvloop" }, ] +scanning = [ + { name = "bandit" }, +] test = [ { name = "bcrypt" }, { name = "coverage" }, @@ -676,6 +695,7 @@ test = [ requires-dist = [ { name = "aiomqtt", specifier = ">=2.4.0" }, { name = "argon2-cffi", specifier = ">=23.1.0" }, + { name = "bandit", specifier = ">=1.9.1" }, { name = "httpx", specifier = ">=0.28.1,<29.0" }, { name = "ifaddr", specifier = ">=0.2.0,<0.3" }, { name = "jsonschema", specifier = ">=4.22.0,<5.0" }, @@ -706,6 +726,7 @@ dev = [ { name = "serpent", specifier = "==1.41" }, ] linux = [{ name = "uvloop", specifier = "==0.20.0" }] +scanning = [{ name = "bandit" }] test = [ { name = "bcrypt", specifier = "==4.3.0" }, { name = "coverage", specifier = "==7.8.0" }, @@ -1139,6 +1160,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/82/3d/14ce75ef66813643812f3093ab17e46d3a206942ce7376d31ec2d36229e7/lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12", size = 113151 }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = 
"sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 }, +] + [[package]] name = "markupsafe" version = "3.0.3" @@ -1225,6 +1258,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516 }, ] +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + [[package]] name = "mistune" version = "3.1.4" @@ -2146,6 +2188,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/71/44ce230e1b7fadd372515a97e32a83011f906ddded8d03e3c6aafbdedbb7/rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f", size = 8046 }, ] +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393 }, +] + [[package]] name = "rpds-py" version = "0.28.0" @@ -2397,6 +2452,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, ] +[[package]] +name = "stevedore" +version = "5.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/5b/496f8abebd10c3301129abba7ddafd46c71d799a70c44ab080323987c4c9/stevedore-5.6.0.tar.gz", hash = "sha256:f22d15c6ead40c5bbfa9ca54aa7e7b4a07d59b36ae03ed12ced1a54cf0b51945", size = 516074 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/40/8561ce06dc46fd17242c7724ab25b257a2ac1b35f4ebf551b40ce6105cfa/stevedore-5.6.0-py3-none-any.whl", hash = "sha256:4a36dccefd7aeea0c70135526cecb7766c4c84c473b1af68db23d541b6dc1820", size = 54428 }, +] + [[package]] name = "structlog" version = "25.5.0"