From ffc9e1618c5dd3103e8afe075951ca4901ea8016 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 9 Sep 2025 16:48:56 -0400 Subject: [PATCH 01/53] prototype cli interface for managing koi nodes --- pyproject.toml | 3 ++ src/koi_net/cli/__init__.py | 1 + src/koi_net/cli/commands.py | 99 +++++++++++++++++++++++++++++++++++++ src/koi_net/cli/models.py | 41 +++++++++++++++ 4 files changed, 144 insertions(+) create mode 100644 src/koi_net/cli/__init__.py create mode 100644 src/koi_net/cli/commands.py create mode 100644 src/koi_net/cli/models.py diff --git a/pyproject.toml b/pyproject.toml index 7d2569c..57e8871 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,5 +30,8 @@ examples = [ "rich", ] +[project.scripts] +koi = "koi_net.cli:app" + [project.urls] Homepage = "https://github.com/BlockScience/koi-net/" \ No newline at end of file diff --git a/src/koi_net/cli/__init__.py b/src/koi_net/cli/__init__.py new file mode 100644 index 0000000..c032984 --- /dev/null +++ b/src/koi_net/cli/__init__.py @@ -0,0 +1 @@ +from .commands import app \ No newline at end of file diff --git a/src/koi_net/cli/commands.py b/src/koi_net/cli/commands.py new file mode 100644 index 0000000..dc73541 --- /dev/null +++ b/src/koi_net/cli/commands.py @@ -0,0 +1,99 @@ +import os +import typer +from typing import Callable +from rich.console import Console +from rich.table import Table + +from importlib.metadata import entry_points + +from koi_net.cli.models import KoiNetworkConfig +from koi_net.core import NodeInterface +import shutil + +app = typer.Typer() +console = Console() + +installed_nodes = entry_points(group='koi_net.node') + +net_config = KoiNetworkConfig.load_from_yaml() + +@app.command() +def list_node_types(): + table = Table(title="installed node types") + table.add_column("name", style="cyan") + table.add_column("module", style="magenta") + + for node in installed_nodes: + table.add_row(node.name, node.module) + console.print(table) + +@app.command() +def list_nodes(): + table = Table(title="created nodes") + table.add_column("name", style="cyan") + table.add_column("rid", style="magenta") + + for dir in os.listdir('.'): + if not os.path.isdir(dir): + continue + for file in os.listdir(dir): + file_path = os.path.join(dir, file) + if not (os.path.isfile(file_path) and file == "config.yaml"): + continue + + print(os.getcwd()) + os.chdir(dir) + print(os.getcwd()) + + node_type = net_config.nodes.get(dir) + + ep = list(installed_nodes.select(name=node_type))[0] + create_node: Callable[[], NodeInterface] = ep.load() + + node = create_node() + + print(ep) + print(dir) + print(node.identity.rid) + + table.add_row(dir, str(node.identity.rid)) + + os.chdir('..') + print(os.getcwd()) + + console.print(table) + +@app.command() +def create(type: str, name: str): + # if name not in installed_nodes: + # console.print(f"[bold red]Error:[/bold red] node type '{name}' doesn't exist") + # raise typer.Exit(code=1) + + eps = installed_nodes.select(name=type) + if eps: + ep = list(eps)[0] + + os.mkdir(name) + os.chdir(name) + + ep.load() + + os.chdir('..') + + net_config.nodes[name] = type + net_config.save_to_yaml() + +@app.command() +def remove(name: str): + shutil.rmtree(name) + net_config.nodes.pop(name, None) + net_config.save_to_yaml() + +@app.command() +def start(name: str): + os.chdir(name) + node_type = net_config.nodes.get(name) + ep = list(installed_nodes.select(name=node_type))[0] + create_node: Callable[[], NodeInterface] = ep.load() + + create_node().server.run() \ No newline at end of file diff --git 
a/src/koi_net/cli/models.py b/src/koi_net/cli/models.py new file mode 100644 index 0000000..dfb4093 --- /dev/null +++ b/src/koi_net/cli/models.py @@ -0,0 +1,41 @@ +from pydantic import BaseModel, Field, PrivateAttr +from ruamel.yaml import YAML + + +class KoiNetworkConfig(BaseModel): + nodes: dict[str, str] = Field(default_factory=dict) + _file_path: str = PrivateAttr(default="koi-net-config.yaml") + + @classmethod + def load_from_yaml( + cls, + file_path: str = "koi-net-config.yaml", + ): + yaml = YAML() + + try: + with open(file_path, "r") as f: + file_content = f.read() + config_data = yaml.load(file_content) + config = cls.model_validate(config_data) + + except FileNotFoundError: + config = cls() + + config._file_path = file_path + config.save_to_yaml() + return config + + def save_to_yaml(self): + yaml = YAML() + + with open(self._file_path, "w") as f: + try: + config_data = self.model_dump(mode="json") + yaml.dump(config_data, f) + except Exception as e: + if self._file_content: + f.seek(0) + f.truncate() + f.write(self._file_content) + raise e \ No newline at end of file From 469f3bfda3eb21612bbfff01c751aa790255b23c Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 18 Sep 2025 14:57:33 -0400 Subject: [PATCH 02/53] reorganizing for dependency injection --- .gitignore | 4 +- container_test.py | 4 + examples/coordinator.py | 4 +- examples/partial.py | 14 +- pyproject.toml | 1 + src/koi_net/__init__.py | 2 +- src/koi_net/cache_adapter.py | 10 + src/koi_net/core.py | 321 +++++++++++++---------------- src/koi_net/effector.py | 25 ++- src/koi_net/network/event_queue.py | 3 - src/koi_net/network/resolver.py | 4 - uv.lock | 28 +++ 12 files changed, 217 insertions(+), 203 deletions(-) create mode 100644 container_test.py create mode 100644 src/koi_net/cache_adapter.py diff --git a/.gitignore b/.gitignore index eb47159..3706b3c 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,6 @@ venv .env prototypes .vscode -dist/ \ No newline at end of file +dist/ +docs/ +tests/ \ No newline at end of file diff --git a/container_test.py b/container_test.py new file mode 100644 index 0000000..21c9f79 --- /dev/null +++ b/container_test.py @@ -0,0 +1,4 @@ +from koi_net.core import NodeContainer + +node = NodeContainer() +# node. 
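The container_test.py stub above exercises the new dependency-injector based wiring. A minimal sketch of the declarative-container pattern this patch adopts (class and provider names below are illustrative, not taken from the patch):

    from dependency_injector import containers, providers

    class Greeter:
        def __init__(self, name: str):
            self.name = name

    class Container(containers.DeclarativeContainer):
        # Object wraps a constant; Factory would build a new instance per
        # call; Singleton builds once and reuses the same instance.
        name = providers.Object("koi")
        greeter = providers.Singleton(Greeter, name=name)

    container = Container()
    assert container.greeter().name == "koi"
    assert container.greeter() is container.greeter()  # Singleton reuse

Each provider declared on NodeContainer below plays the role that the hand-written constructor calls played in the old NodeInterface.__init__.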
\ No newline at end of file diff --git a/examples/coordinator.py b/examples/coordinator.py index 5abef99..ce2c483 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -4,7 +4,7 @@ from rid_lib.types import KoiNetNode, KoiNetEdge from koi_net.config import NodeConfig, KoiNetConfig from koi_net.protocol.node import NodeProfile, NodeProvides, NodeType -from koi_net import NodeInterface +from koi_net import NodeContainer from koi_net.context import HandlerContext from koi_net.processor.handler import HandlerType from koi_net.processor.knowledge_object import KnowledgeObject @@ -38,7 +38,7 @@ class CoordinatorConfig(NodeConfig): ) ) -node = NodeInterface( +node = NodeContainer( config=CoordinatorConfig.load_from_yaml("coordinator_config.yaml"), use_kobj_processor_thread=True ) diff --git a/examples/partial.py b/examples/partial.py index 20ff481..449e928 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -1,7 +1,8 @@ import logging from pydantic import Field +from dependency_injector.providers import Factory from rich.logging import RichHandler -from koi_net import NodeInterface +from koi_net import NodeContainer from koi_net.protocol.node import NodeProfile, NodeType from koi_net.config import NodeConfig, KoiNetConfig @@ -29,9 +30,12 @@ class PartialNodeConfig(NodeConfig): ) ) -node = NodeInterface( - config=PartialNodeConfig.load_from_yaml("partial_config.yaml") -) +class PartialNodeContainer(NodeContainer): + config = Factory( + PartialNodeConfig.load_from_yaml, + "partial_config.yaml" + ) + if __name__ == "__main__": - node.poller.run() \ No newline at end of file + PartialNodeContainer().poller().run() \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 94ca887..17a60f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "fastapi>=0.115.12", "uvicorn>=0.34.2", "rich>=14.1.0", + "dependency-injector>=4.48.1", ] [project.optional-dependencies] diff --git a/src/koi_net/__init__.py b/src/koi_net/__init__.py index 61c995d..db33c75 100644 --- a/src/koi_net/__init__.py +++ b/src/koi_net/__init__.py @@ -1 +1 @@ -from .core import NodeInterface \ No newline at end of file +from .core import NodeContainer \ No newline at end of file diff --git a/src/koi_net/cache_adapter.py b/src/koi_net/cache_adapter.py new file mode 100644 index 0000000..8abc936 --- /dev/null +++ b/src/koi_net/cache_adapter.py @@ -0,0 +1,10 @@ +from rid_lib.ext import Cache +from koi_net.config import NodeConfig + +class CacheProvider(Cache): + def __init__(self, config: NodeConfig): + self.config = config + + @property + def directory_path(self): + return self.config.koi_net.cache_directory_path \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index cdb7338..cfc3225 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,6 +1,12 @@ import logging from typing import Generic, TypeVar +from dependency_injector.providers import Factory, Self, Dependency +from dependency_injector.containers import DeclarativeContainer + from rid_lib.ext import Cache + +from koi_net.cache_adapter import CacheProvider + from .network.resolver import NetworkResolver from .network.event_queue import NetworkEventQueue from .network.graph import NetworkGraph @@ -25,9 +31,9 @@ logger = logging.getLogger(__name__) -T = TypeVar("T", bound=NodeConfig) +# T = TypeVar("T", bound=NodeConfig) -class NodeInterface(Generic[T]): +class NodeContainer(DeclarativeContainer): """Interface for a node's subsystems. 
This class embodies a node, and wires up all of its subsystems to @@ -38,178 +44,141 @@ class NodeInterface(Generic[T]): passing new class implementations into `__init__`. """ - config: T - cache: Cache - identity: NodeIdentity - effector: Effector - graph: NetworkGraph - secure: Secure - request_handler: RequestHandler - response_handler: ResponseHandler - resolver: NetworkResolver - event_queue: NetworkEventQueue - actor: Actor - action_context: ActionContext - handler_context: HandlerContext - pipeline: KnowledgePipeline - processor: ProcessorInterface - error_handler: ErrorHandler - lifecycle: NodeLifecycle - server: NodeServer - poller: NodePoller - - use_kobj_processor_thread: bool - - def __init__( - self, - config: T, - use_kobj_processor_thread: bool = False, - handlers: list[KnowledgeHandler] | None = None, - - # optional overrides - CacheOverride: type[Cache] | None = None, - NodeIdentityOverride: type[NodeIdentity] | None = None, - EffectorOverride: type[Effector] | None = None, - NetworkGraphOverride: type[NetworkGraph] | None = None, - SecureOverride: type[Secure] | None = None, - RequestHandlerOverride: type[RequestHandler] | None = None, - ResponseHandlerOverride: type[ResponseHandler] | None = None, - NetworkResolverOverride: type[NetworkResolver] | None = None, - NetworkEventQueueOverride: type[NetworkEventQueue] | None = None, - ActorOverride: type[Actor] | None = None, - ActionContextOverride: type[ActionContext] | None = None, - HandlerContextOverride: type[HandlerContext] | None = None, - KnowledgePipelineOverride: type[KnowledgePipeline] | None = None, - ProcessorInterfaceOverride: type[ProcessorInterface] | None = None, - ErrorHandlerOverride: type[ErrorHandler] | None = None, - NodeLifecycleOverride: type[NodeLifecycle] | None = None, - NodeServerOverride: type[NodeServer] | None = None, - NodePollerOverride: type[NodePoller] | None = None, - ): - self.use_kobj_processor_thread = use_kobj_processor_thread - - self.config = config - self.cache = (CacheOverride or Cache)( - directory_path=self.config.koi_net.cache_directory_path - ) - - self.identity = (NodeIdentityOverride or NodeIdentity)(config=self.config) - self.effector = (EffectorOverride or Effector)(cache=self.cache) - - self.graph = (NetworkGraphOverride or NetworkGraph)( - cache=self.cache, - identity=self.identity - ) - - self.secure = (SecureOverride or Secure)( - identity=self.identity, - effector=self.effector, - config=self.config - ) - - self.request_handler = (RequestHandlerOverride or RequestHandler)( - effector=self.effector, - identity=self.identity, - secure=self.secure - ) - - self.response_handler = (ResponseHandlerOverride or ResponseHandler)(self.cache, self.effector) - - self.resolver = (NetworkResolverOverride or NetworkResolver)( - config=self.config, - cache=self.cache, - identity=self.identity, - graph=self.graph, - request_handler=self.request_handler, - effector=self.effector - ) + config = Factory( + NodeConfig.load_from_yaml + ) + + cache = Factory( + CacheProvider, + config=config + ) + + identity = Factory( + NodeIdentity, + config=config + ) + + effector = Factory( + Effector, + cache=cache, + resolver=Self, + processor=Self, + action_context=Self + ) + + graph = Factory( + NetworkGraph, + cache=cache, + identity=identity + ) + + secure = Factory( + Secure, + identity=identity, + effector=effector, + config=config + ) + + request_handler = Factory( + RequestHandler, + effector=effector, + identity=identity, + secure=secure + ) + + response_handler = Factory( + ResponseHandler, + 
cache=cache, + effector=effector + ) + + + resolver = Factory( + NetworkResolver, + config=config, + cache=cache, + identity=identity, + graph=graph, + request_handler=request_handler + ) - self.event_queue = (NetworkEventQueueOverride or NetworkEventQueue)( - config=self.config, - cache=self.cache, - identity=self.identity, - graph=self.graph, - request_handler=self.request_handler, - effector=self.effector - ) - - self.actor = (ActorOverride or Actor)() - - # pull all handlers defined in default_handlers module - if handlers is None: - handlers = [ - obj for obj in vars(default_handlers).values() - if isinstance(obj, KnowledgeHandler) - ] - - self.action_context = (ActionContextOverride or ActionContext)( - identity=self.identity, - effector=self.effector - ) - - self.handler_context = (HandlerContextOverride or HandlerContext)( - identity=self.identity, - config=self.config, - cache=self.cache, - event_queue=self.event_queue, - graph=self.graph, - request_handler=self.request_handler, - resolver=self.resolver, - effector=self.effector - ) - - self.pipeline = (KnowledgePipelineOverride or KnowledgePipeline)( - handler_context=self.handler_context, - cache=self.cache, - request_handler=self.request_handler, - event_queue=self.event_queue, - graph=self.graph, - default_handlers=handlers - ) - - self.processor = (ProcessorInterfaceOverride or ProcessorInterface)( - pipeline=self.pipeline, - use_kobj_processor_thread=self.use_kobj_processor_thread - ) - - self.error_handler = (ErrorHandlerOverride or ErrorHandler)( - processor=self.processor, - actor=self.actor - ) - - self.request_handler.set_error_handler(self.error_handler) - - self.handler_context.set_processor(self.processor) - - self.effector.set_processor(self.processor) - self.effector.set_resolver(self.resolver) - self.effector.set_action_context(self.action_context) - - self.actor.set_ctx(self.handler_context) - - self.lifecycle = (NodeLifecycleOverride or NodeLifecycle)( - config=self.config, - identity=self.identity, - graph=self.graph, - processor=self.processor, - effector=self.effector, - actor=self.actor, - use_kobj_processor_thread=use_kobj_processor_thread - ) - - # if self.config.koi_net.node_profile.node_type == NodeType.FULL: - self.server = (NodeServerOverride or NodeServer)( - config=self.config, - lifecycle=self.lifecycle, - secure=self.secure, - processor=self.processor, - event_queue=self.event_queue, - response_handler=self.response_handler - ) - - self.poller = (NodePollerOverride or NodePoller)( - processor=self.processor, - lifecycle=self.lifecycle, - resolver=self.resolver, - config=self.config - ) + event_queue = Factory( + NetworkEventQueue, + config=config, + cache=cache, + identity=identity, + graph=graph, + request_handler=request_handler, + effector=effector + ) + + actor = Factory(Actor) + + action_context = Factory( + ActionContext, + identity=identity, + effector=effector + ) + + handler_context = Factory( + HandlerContext, + identity=identity, + config=config, + cache=cache, + event_queue=event_queue, + graph=graph, + request_handler=request_handler, + resolver=resolver, + effector=effector + ) + + pipeline = Factory( + KnowledgePipeline, + handler_context=handler_context, + cache=cache, + request_handler=request_handler, + event_queue=event_queue, + graph=graph, + default_handlers=[] # deal with default handlers + ) + + processor = Factory( + ProcessorInterface, + pipeline=pipeline, + use_kobj_processor_thread=True # resolve this with to implementations? 
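With the subsystems expressed as providers, the old *Override constructor arguments have a natural replacement: overriding a provider on the container at runtime. A rough sketch, assuming a hypothetical InMemoryCache subclass of CacheProvider:

    from dependency_injector import providers

    from koi_net.cache_adapter import CacheProvider
    from koi_net.core import NodeContainer

    class InMemoryCache(CacheProvider):  # hypothetical custom cache
        ...

    container = NodeContainer()
    # Every provider that depends on `cache` now receives the override.
    container.cache.override(
        providers.Factory(InMemoryCache, config=container.config)
    )
    cache = container.cache()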
+ ) + + error_handler = Factory( + ErrorHandler, + processor=processor, + actor=actor + ) + + lifecycle = Factory( + NodeLifecycle, + config=config, + identity=identity, + graph=graph, + processor=processor, + effector=effector, + actor=actor, + use_kobj_processor_thread=True + ) + + server = Factory( + NodeServer, + config=config, + lifecycle=lifecycle, + secure=secure, + processor=processor, + event_queue=event_queue, + response_handler=response_handler + ) + + poller = Factory( + NodePoller, + processor=processor, + lifecycle=lifecycle, + resolver=resolver, + config=config + ) \ No newline at end of file diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index da51e56..8e45f08 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -4,11 +4,11 @@ from rid_lib.ext import Cache, Bundle from rid_lib.core import RID, RIDType from rid_lib.types import KoiNetNode +from .network.resolver import NetworkResolver from typing import TYPE_CHECKING if TYPE_CHECKING: - from .network.resolver import NetworkResolver from .processor.interface import ProcessorInterface from .context import ActionContext @@ -23,7 +23,7 @@ class Effector: """Subsystem for dereferencing RIDs.""" cache: Cache - resolver: "NetworkResolver | None" + resolver: NetworkResolver processor: "ProcessorInterface | None" action_context: "ActionContext | None" _action_table: dict[ @@ -37,21 +37,24 @@ class Effector: def __init__( self, cache: Cache, + resolver: "NetworkResolver", + processor: "ProcessorInterface", + action_context: "ActionContext" ): self.cache = cache - self.resolver = None - self.processor = None - self.action_context = None + self.resolver = resolver + self.processor = processor + self.action_context = action_context self._action_table = self.__class__._action_table.copy() - def set_processor(self, processor: "ProcessorInterface"): - self.processor = processor + # def set_processor(self, processor: "ProcessorInterface"): + # self.processor = processor - def set_resolver(self, resolver: "NetworkResolver"): - self.resolver = resolver + # def set_resolver(self, resolver: "NetworkResolver"): + # self.resolver = resolver - def set_action_context(self, action_context: "ActionContext"): - self.action_context = action_context + # def set_action_context(self, action_context: "ActionContext"): + # self.action_context = action_context @classmethod def register_default_action(cls, rid_type: RIDType): diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index 4f52552..d6d9922 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -30,7 +30,6 @@ class NetworkEventQueue: config: NodeConfig identity: NodeIdentity effector: Effector - cache: Cache graph: NetworkGraph request_handler: RequestHandler poll_event_queue: EventQueue @@ -39,7 +38,6 @@ class NetworkEventQueue: def __init__( self, config: NodeConfig, - cache: Cache, identity: NodeIdentity, effector: Effector, graph: NetworkGraph, @@ -47,7 +45,6 @@ def __init__( ): self.config = config self.identity = identity - self.cache = cache self.graph = graph self.request_handler = request_handler self.effector = effector diff --git a/src/koi_net/network/resolver.py b/src/koi_net/network/resolver.py index 5cc12c8..d80635a 100644 --- a/src/koi_net/network/resolver.py +++ b/src/koi_net/network/resolver.py @@ -12,7 +12,6 @@ from ..protocol.api_models import ErrorResponse from ..identity import NodeIdentity from ..config import NodeConfig -from ..effector import Effector logger = 
logging.getLogger(__name__) @@ -22,7 +21,6 @@ class NetworkResolver: config: NodeConfig identity: NodeIdentity - effector: Effector cache: Cache graph: NetworkGraph request_handler: RequestHandler @@ -32,7 +30,6 @@ def __init__( config: NodeConfig, cache: Cache, identity: NodeIdentity, - effector: Effector, graph: NetworkGraph, request_handler: RequestHandler, ): @@ -41,7 +38,6 @@ def __init__( self.cache = cache self.graph = graph self.request_handler = request_handler - self.effector = effector self.poll_event_queue = dict() self.webhook_event_queue = dict() diff --git a/uv.lock b/uv.lock index cc4bff2..9f74552 100644 --- a/uv.lock +++ b/uv.lock @@ -309,6 +309,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" }, ] +[[package]] +name = "dependency-injector" +version = "4.48.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/7c/5062c4a7ffd32bf210ff55fab9d279a5beeae350fb09533d3536811e13b6/dependency_injector-4.48.1.tar.gz", hash = "sha256:1805185e4522effad6d5e348c255d27e80d3f8adc89701daf13d743367392978", size = 1100885, upload-time = "2025-06-20T10:21:52.248Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/f9/c9b77652f724aece8856e281f7a71e5af544049b3c068df70c68868e43be/dependency_injector-4.48.1-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:a6f73011d532f3ea59689aad85c7999be6da3f30393041a745d5861cdcdc02e4", size = 1631637, upload-time = "2025-06-20T10:21:24.729Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f0/d91c9cdabb1f2354762aca588757d1aa341f3cbccbc8636dd2c06acac10b/dependency_injector-4.48.1-cp38-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ac09f508fa9aee06a036ebf3e3d3b2a210276aba1993e9993cec7f1fdc5fd89e", size = 1855944, upload-time = "2025-06-20T10:21:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/57/ee/d69c4758a12653edbe6ee15c0bf4195981c9820650a1cfa762cbb838485b/dependency_injector-4.48.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b05a4a980096b53ad90a87965c5450183bfbb8bbe36615d7cea97537086d622", size = 1811989, upload-time = "2025-06-20T10:21:28.293Z" }, + { url = "https://files.pythonhosted.org/packages/cf/6d/d2a257402c8c3f7a9c61f1b8a0482ec4373f1ef7fdfe784a91e883506e3b/dependency_injector-4.48.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0506e98440ee6c48fe660016d602961b1b3ecc0a8227838a2221048ed11e2fca", size = 1826408, upload-time = "2025-06-20T10:21:29.789Z" }, + { url = "https://files.pythonhosted.org/packages/65/f9/2a408d460eedb264f7ea919754c526c8f3a18c026496cacb7dd6960766d2/dependency_injector-4.48.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1994622eae8917138626303b176cba4c74e625ba1e588cb09d673ca175d299a2", size = 1863948, upload-time = "2025-06-20T10:21:31.951Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8a/2edaef77e725dd8b1a625c84cbccb0f445afe58277c7b243cbf58784826a/dependency_injector-4.48.1-cp38-abi3-win32.whl", hash = "sha256:58d4d81f92e3267c331f160cbbb517fd7644b95ee57a0d6e4b01f53a7e437a4a", size = 1516768, upload-time = "2025-06-20T10:21:33.747Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/41/4bf523af7e1b7f367499f8b8709e0e807e9a14c7d1674b0442d7f84403c8/dependency_injector-4.48.1-cp38-abi3-win_amd64.whl", hash = "sha256:572b22b7db9b103718ea52634b5ca1ef763278338310254334f4633a57c9f0e7", size = 1639850, upload-time = "2025-06-20T10:21:35.639Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b9/203a1cb19cc4ed42748dceb53d9cafe42ee34928f2d5c18cbe5f30d6a573/dependency_injector-4.48.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9a7862987b3dcab5ac4fd82f6bbda55d3b15af1ca7492757c428deccc3720140", size = 1732856, upload-time = "2025-06-20T10:21:37.459Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/7bea5ea904465b4d04c7e3cddf669079d9abb0902d75b05417b5f884c570/dependency_injector-4.48.1-pp310-pypy310_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb33c6d6b6100564dfaee20f3f76a2756158cceff6499e9d0bca8290f8e5f124", size = 1822529, upload-time = "2025-06-20T10:21:39.363Z" }, + { url = "https://files.pythonhosted.org/packages/97/3d/7b16ec2cd0f4e7bba380084a713395d1483baa67e7ac63338f7b8a9a30a8/dependency_injector-4.48.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a659380386bd236579b7f82f51e97f6074d0c878a10db5b50086000de6ce3c28", size = 1731129, upload-time = "2025-06-20T10:21:41.241Z" }, + { url = "https://files.pythonhosted.org/packages/16/26/bf4612e9adf60fdbfd97360663d2b39ab17bd4308c7294dcfcd54546c701/dependency_injector-4.48.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:76774369c7268d5dd211af75abfcb4433d972760be90db342c2de325ee4c24a0", size = 1616736, upload-time = "2025-06-20T10:21:43.134Z" }, + { url = "https://files.pythonhosted.org/packages/04/de/92b98b96742fbc9c04273729cb14c744a97a8dc2ee3e0d12a0d3cc3945e2/dependency_injector-4.48.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51f8d9d78a1a147908ed7929df628d859251a814e6a001973bd96ae2b5648760", size = 1734103, upload-time = "2025-06-20T10:21:44.748Z" }, + { url = "https://files.pythonhosted.org/packages/70/b7/31061c32c7d3e1f6c3e1fc71eb37d2ba4134e9bb2e50ad558bbff4aad9fa/dependency_injector-4.48.1-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c23ab17cd3e160de1fc5d78719bf86fbfc81c21c8ea02b43832a6a1e2c8a8d8", size = 1826656, upload-time = "2025-06-20T10:21:46.663Z" }, + { url = "https://files.pythonhosted.org/packages/57/a7/00b2a6e8769f3a5b248edf0b0d503289eb3516fa192f4e4cb368163f4a71/dependency_injector-4.48.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79be912a0dedb1341b1400018defca6a9966fcb8d4a84b325623fa57d3c08171", size = 1734610, upload-time = "2025-06-20T10:21:48.271Z" }, + { url = "https://files.pythonhosted.org/packages/43/b3/aa73fe301cf4dc006d2d6d82b6bf2f9a5776854f20e0aaa1122fa4fd1f2f/dependency_injector-4.48.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:65d9cf9f4eb31f837ed387210158e0003a4509478de1cbdc56c8439232f22ecd", size = 1618844, upload-time = "2025-06-20T10:21:50.271Z" }, +] + [[package]] name = "docutils" version = "0.21.2" @@ -504,6 +530,7 @@ version = "1.1.0" source = { editable = "." 
} dependencies = [ { name = "cryptography" }, + { name = "dependency-injector" }, { name = "fastapi" }, { name = "httpx" }, { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, @@ -534,6 +561,7 @@ docs = [ requires-dist = [ { name = "build", marker = "extra == 'dev'" }, { name = "cryptography", specifier = ">=45.0.3" }, + { name = "dependency-injector", specifier = ">=4.48.1" }, { name = "fastapi", specifier = ">=0.115.12" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "networkx", specifier = ">=3.4.2" }, From 73010a31e0cc8bbd01120fb20d265fc9d76c4155 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 18 Sep 2025 16:56:30 -0400 Subject: [PATCH 03/53] factored effector (for now), added new providers for list of knowledge handlers --- src/koi_net/actor.py | 6 +-- src/koi_net/context.py | 7 --- src/koi_net/core.py | 58 +++++++++++++---------- src/koi_net/lifecycle.py | 16 ++++--- src/koi_net/network/event_queue.py | 11 ++--- src/koi_net/network/request_handler.py | 10 ++-- src/koi_net/network/response_handler.py | 8 +--- src/koi_net/processor/default_handlers.py | 2 +- src/koi_net/secure.py | 11 ++--- src/koi_net/utils.py | 18 +++++++ 10 files changed, 82 insertions(+), 65 deletions(-) create mode 100644 src/koi_net/utils.py diff --git a/src/koi_net/actor.py b/src/koi_net/actor.py index 5a0f5ef..5cdcb69 100644 --- a/src/koi_net/actor.py +++ b/src/koi_net/actor.py @@ -15,9 +15,7 @@ class Actor: Functions defined here used by multiple subsystems. """ - ctx: HandlerContext - - def set_ctx(self, ctx: HandlerContext): + def __init__(self, ctx: HandlerContext): self.ctx = ctx def handshake_with(self, target: KoiNetNode): @@ -38,7 +36,7 @@ def handshake_with(self, target: KoiNetNode): self.ctx.event_queue.push_event_to( event=Event.from_bundle( event_type=EventType.NEW, - bundle=self.ctx.effector.deref(self.ctx.identity.rid)), + bundle=self.ctx.cache.read(self.ctx.identity.rid)), node=target ) diff --git a/src/koi_net/context.py b/src/koi_net/context.py index ee46b21..88ccadb 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -2,7 +2,6 @@ from koi_net.network.resolver import NetworkResolver from .config import NodeConfig -from .effector import Effector from .network.graph import NetworkGraph from .network.event_queue import NetworkEventQueue from .network.request_handler import RequestHandler @@ -14,15 +13,12 @@ class ActionContext: """Provides action handlers access to other subsystems.""" identity: NodeIdentity - effector: Effector def __init__( self, identity: NodeIdentity, - effector: Effector ): self.identity = identity - self.effector = effector class HandlerContext: @@ -35,7 +31,6 @@ class HandlerContext: graph: NetworkGraph request_handler: RequestHandler resolver: NetworkResolver - effector: Effector _processor: ProcessorInterface | None def __init__( @@ -47,7 +42,6 @@ def __init__( graph: NetworkGraph, request_handler: RequestHandler, resolver: NetworkResolver, - effector: Effector ): self.identity = identity self.config = config @@ -56,7 +50,6 @@ def __init__( self.graph = graph self.request_handler = request_handler self.resolver = resolver - self.effector = effector self._processor = None def set_processor(self, processor: ProcessorInterface): diff --git a/src/koi_net/core.py b/src/koi_net/core.py index cfc3225..cce75dc 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,6 +1,6 @@ import logging from typing import Generic, TypeVar -from dependency_injector.providers 
import Factory, Self, Dependency +from dependency_injector.providers import Factory, Self, Dependency, Callable, List, Object from dependency_injector.containers import DeclarativeContainer from rid_lib.ext import Cache @@ -58,13 +58,13 @@ class NodeContainer(DeclarativeContainer): config=config ) - effector = Factory( - Effector, - cache=cache, - resolver=Self, - processor=Self, - action_context=Self - ) + # effector = Factory( + # Effector, + # cache=cache, + # resolver=Self, + # processor=Self, + # action_context=Self + # ) graph = Factory( NetworkGraph, @@ -75,24 +75,22 @@ class NodeContainer(DeclarativeContainer): secure = Factory( Secure, identity=identity, - effector=effector, + cache=cache, config=config ) request_handler = Factory( RequestHandler, - effector=effector, + cache=cache, identity=identity, secure=secure ) response_handler = Factory( ResponseHandler, - cache=cache, - effector=effector + cache=cache ) - resolver = Factory( NetworkResolver, config=config, @@ -108,18 +106,26 @@ class NodeContainer(DeclarativeContainer): cache=cache, identity=identity, graph=graph, - request_handler=request_handler, - effector=effector + request_handler=request_handler ) - actor = Factory(Actor) - action_context = Factory( - ActionContext, - identity=identity, - effector=effector + knowledge_handlers = List( + Object(default_handlers.basic_rid_handler), + Object(default_handlers.basic_manifest_handler), + Object(default_handlers.secure_profile_handler), + Object(default_handlers.edge_negotiation_handler), + Object(default_handlers.coordinator_contact), + Object(default_handlers.basic_network_output_filter), + Object(default_handlers.forget_edge_on_node_deletion) ) + # action_context = Factory( + # ActionContext, + # identity=identity, + # cache=cache + # ) + handler_context = Factory( HandlerContext, identity=identity, @@ -128,8 +134,12 @@ class NodeContainer(DeclarativeContainer): event_queue=event_queue, graph=graph, request_handler=request_handler, - resolver=resolver, - effector=effector + resolver=resolver + ) + + actor = Factory( + Actor, + ctx=handler_context ) pipeline = Factory( @@ -139,7 +149,7 @@ class NodeContainer(DeclarativeContainer): request_handler=request_handler, event_queue=event_queue, graph=graph, - default_handlers=[] # deal with default handlers + default_handlers=knowledge_handlers ) processor = Factory( @@ -160,7 +170,7 @@ class NodeContainer(DeclarativeContainer): identity=identity, graph=graph, processor=processor, - effector=effector, + cache=cache, actor=actor, use_kobj_processor_thread=True ) diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index fac444e..988de2d 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -1,10 +1,10 @@ import logging from contextlib import contextmanager, asynccontextmanager +from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode from .actor import Actor -from .effector import Effector from .config import NodeConfig from .processor.interface import ProcessorInterface from .network.graph import NetworkGraph @@ -20,7 +20,7 @@ class NodeLifecycle: identity: NodeIdentity graph: NetworkGraph processor: ProcessorInterface - effector: Effector + cache: Cache actor: Actor use_kobj_processor_thread: bool @@ -30,7 +30,7 @@ def __init__( identity: NodeIdentity, graph: NetworkGraph, processor: ProcessorInterface, - effector: Effector, + cache: Cache, actor: Actor, use_kobj_processor_thread: bool ): @@ -38,7 +38,7 @@ def __init__( self.identity = identity self.graph = graph self.processor = 
processor - self.effector = effector + self.cache = cache self.actor = actor self.use_kobj_processor_thread = use_kobj_processor_thread @@ -82,8 +82,12 @@ def start(self): self.graph.generate() - # refresh to reflect changes (if any) in config.yaml - self.effector.deref(self.identity.rid, refresh_cache=True) + # refresh to reflect changes (if any) in config.yaml + + self.processor.handle(bundle=Bundle.generate( + rid=self.identity.rid, + contents=self.identity.profile.model_dump() + )) logger.debug("Waiting for kobj queue to empty") if self.use_kobj_processor_thread: diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index d6d9922..d85c1d1 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -13,7 +13,6 @@ from ..protocol.event import Event from ..identity import NodeIdentity from ..config import NodeConfig -from ..effector import Effector logger = logging.getLogger(__name__) @@ -29,7 +28,7 @@ class NetworkEventQueue: config: NodeConfig identity: NodeIdentity - effector: Effector + cache: Cache graph: NetworkGraph request_handler: RequestHandler poll_event_queue: EventQueue @@ -39,7 +38,7 @@ def __init__( self, config: NodeConfig, identity: NodeIdentity, - effector: Effector, + cache: Cache, graph: NetworkGraph, request_handler: RequestHandler, ): @@ -47,7 +46,7 @@ def __init__( self.identity = identity self.graph = graph self.request_handler = request_handler - self.effector = effector + self.cache = cache self.poll_event_queue = dict() self.webhook_event_queue = dict() @@ -102,7 +101,7 @@ def push_event_to(self, event: Event, node: KoiNetNode, flush=False): """ logger.debug(f"Pushing event {event.event_type} {event.rid!r} to {node}") - node_bundle = self.effector.deref(node) + node_bundle = self.cache.read(node) # if there's an edge from me to the target node, override broadcast type edge_rid = self.graph.get_edge( @@ -110,7 +109,7 @@ def push_event_to(self, event: Event, node: KoiNetNode, flush=False): target=node ) - edge_bundle = self.effector.deref(edge_rid) if edge_rid else None + edge_bundle = self.cache.read(edge_rid) if edge_rid else None if edge_bundle: logger.debug(f"Found edge from me to {node!r}") diff --git a/src/koi_net/network/request_handler.py b/src/koi_net/network/request_handler.py index 1c17383..3909be2 100644 --- a/src/koi_net/network/request_handler.py +++ b/src/koi_net/network/request_handler.py @@ -1,6 +1,7 @@ import logging import httpx from rid_lib import RID +from rid_lib.ext import Cache from rid_lib.types.koi_net_node import KoiNetNode from ..identity import NodeIdentity @@ -27,7 +28,6 @@ ) from ..protocol.node import NodeProfile, NodeType from ..secure import Secure -from ..effector import Effector from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -57,18 +57,18 @@ class UnknownPathError(Exception): class RequestHandler: """Handles making requests to other KOI nodes.""" - effector: Effector + cache: Cache identity: NodeIdentity secure: Secure error_handler: "ErrorHandler" def __init__( self, - effector: Effector, + cache: Cache, identity: NodeIdentity, secure: Secure ): - self.effector = effector + self.cache = cache self.identity = identity self.secure = secure @@ -84,7 +84,7 @@ def get_url(self, node_rid: KoiNetNode) -> str: if node_rid == self.identity.rid: raise SelfRequestError("Don't talk to yourself") - node_bundle = self.effector.deref(node_rid) + node_bundle = self.cache.read(node_rid) if node_bundle: node_profile = node_bundle.validate_contents(NodeProfile) diff --git 
a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index 2e5c3fe..702cddf 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -12,7 +12,6 @@ FetchManifests, FetchBundles, ) -from ..effector import Effector logger = logging.getLogger(__name__) @@ -21,15 +20,12 @@ class ResponseHandler: """Handles generating responses to requests from other KOI nodes.""" cache: Cache - effector: Effector def __init__( self, cache: Cache, - effector: Effector, ): self.cache = cache - self.effector = effector def fetch_rids(self, req: FetchRids, source: KoiNetNode) -> RidsPayload: """Returns response to fetch RIDs request.""" @@ -46,7 +42,7 @@ def fetch_manifests(self, req: FetchManifests, source: KoiNetNode) -> ManifestsP not_found: list[RID] = [] for rid in (req.rids or self.cache.list_rids(req.rid_types)): - bundle = self.effector.deref(rid) + bundle = self.cache.read(rid) if bundle: manifests.append(bundle.manifest) else: @@ -62,7 +58,7 @@ def fetch_bundles(self, req: FetchBundles, source: KoiNetNode) -> BundlesPayload not_found: list[RID] = [] for rid in req.rids: - bundle = self.effector.deref(rid) + bundle = self.cache.read(rid) if bundle: bundles.append(bundle) else: diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/default_handlers.py index 28b95a8..a3d5861 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/default_handlers.py @@ -107,7 +107,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): logger.debug("Handling edge negotiation") peer_rid = edge_profile.target - peer_bundle = ctx.effector.deref(peer_rid) + peer_bundle = ctx.cache.read(peer_rid) if not peer_bundle: logger.warning(f"Peer {peer_rid!r} unknown to me") diff --git a/src/koi_net/secure.py b/src/koi_net/secure.py index 98f0044..a742dd6 100644 --- a/src/koi_net/secure.py +++ b/src/koi_net/secure.py @@ -2,7 +2,7 @@ from functools import wraps import cryptography.exceptions -from rid_lib.ext import Bundle +from rid_lib.ext import Bundle, Cache from rid_lib.ext.utils import sha256_hash from rid_lib.types import KoiNetNode from .identity import NodeIdentity @@ -18,7 +18,6 @@ InvalidSignatureError, InvalidTargetError ) -from .effector import Effector from .config import NodeConfig logger = logging.getLogger(__name__) @@ -27,18 +26,18 @@ class Secure: """Subsystem handling secure protocol logic.""" identity: NodeIdentity - effector: Effector + cache: Cache config: NodeConfig priv_key: PrivateKey def __init__( self, identity: NodeIdentity, - effector: Effector, + cache: Cache, config: NodeConfig ): self.identity = identity - self.effector = effector + self.cache = cache self.config = config self.priv_key = self._load_priv_key() @@ -89,7 +88,7 @@ def validate_envelope(self, envelope: SignedEnvelope): """Validates signed envelope from another node.""" node_bundle = ( - self.effector.deref(envelope.source_node) or + self.cache.read(envelope.source_node) or self._handle_unknown_node(envelope) ) diff --git a/src/koi_net/utils.py b/src/koi_net/utils.py new file mode 100644 index 0000000..43f0db8 --- /dev/null +++ b/src/koi_net/utils.py @@ -0,0 +1,18 @@ +from typing import Callable + +from rid_lib import RID +from rid_lib.ext import Bundle, Cache + +cache = Cache() + +def build_dereferencer( + *funcs: list[Callable[[RID], Bundle | None]] +) -> Callable[[RID], Bundle | None]: + def any_of(rid: RID): + return any( + f(rid) for f in funcs + ) + return any_of + +deref = 
build_dereferencer(cache.read) +deref(RID.from_string("string:hello_world")) \ No newline at end of file From ecf26e06f2ad837b2a39d84b7dd3be6572ce8d5c Mon Sep 17 00:00:00 2001 From: lukvmil Date: Mon, 22 Sep 2025 16:48:41 -0400 Subject: [PATCH 04/53] set up knowledge and event worker threads, continuing to experiment with DI framework. trying to factor out the "actor" class into utils --- examples/coordinator.py | 4 +- src/koi_net/actor.py | 73 -------- src/koi_net/behaviors.py | 80 +++++++++ src/koi_net/context.py | 14 +- src/koi_net/core.py | 132 +++++++++----- src/koi_net/effector.py | 8 +- src/koi_net/kobj_worker.py | 45 +++++ src/koi_net/lifecycle.py | 71 +++++--- src/koi_net/models.py | 14 ++ src/koi_net/network/error_handler.py | 19 +- src/koi_net/network/event_queue.py | 189 +------------------- src/koi_net/poll_event_buffer.py | 17 ++ src/koi_net/poller.py | 11 +- src/koi_net/processor/event_worker.py | 119 ++++++++++++ src/koi_net/processor/interface.py | 106 ----------- src/koi_net/processor/knowledge_pipeline.py | 6 +- src/koi_net/processor/kobj_queue.py | 51 ++++++ src/koi_net/server.py | 21 +-- src/koi_net/worker.py | 10 ++ 19 files changed, 511 insertions(+), 479 deletions(-) delete mode 100644 src/koi_net/actor.py create mode 100644 src/koi_net/behaviors.py create mode 100644 src/koi_net/kobj_worker.py create mode 100644 src/koi_net/models.py create mode 100644 src/koi_net/poll_event_buffer.py create mode 100644 src/koi_net/processor/event_worker.py delete mode 100644 src/koi_net/processor/interface.py create mode 100644 src/koi_net/processor/kobj_queue.py create mode 100644 src/koi_net/worker.py diff --git a/examples/coordinator.py b/examples/coordinator.py index ce2c483..3e941be 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -43,7 +43,7 @@ class CoordinatorConfig(NodeConfig): use_kobj_processor_thread=True ) -@node.processor.pipeline.register_handler(HandlerType.Network, rid_types=[KoiNetNode]) +@node.kobj_queue_manager.pipeline.register_handler(HandlerType.Network, rid_types=[KoiNetNode]) def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): logger.info("Handling node handshake") @@ -55,7 +55,7 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): identity_bundle = ctx.effector.deref(ctx.identity.rid) ctx.event_queue.push_event_to( event=Event.from_bundle(EventType.NEW, identity_bundle), - node=kobj.rid, + target=kobj.rid, flush=True ) diff --git a/src/koi_net/actor.py b/src/koi_net/actor.py deleted file mode 100644 index 5cdcb69..0000000 --- a/src/koi_net/actor.py +++ /dev/null @@ -1,73 +0,0 @@ -from logging import getLogger -from rid_lib.types import KoiNetNode -from rid_lib import RIDType -from koi_net.context import HandlerContext -from koi_net.protocol.api_models import ErrorResponse -from .protocol.event import Event, EventType - - -logger = getLogger(__name__) - - -class Actor: - """Basic node actions. - - Functions defined here used by multiple subsystems. - """ - - def __init__(self, ctx: HandlerContext): - self.ctx = ctx - - def handshake_with(self, target: KoiNetNode): - """Initiates a handshake with target node. - - Pushes successive `FORGET` and `NEW` events to target node to - reset the target's cache in case it already knew this node. 
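The build_dereferencer helper added in utils.py above composes fallback sources, but any() collapses the result to a boolean; a variant that returns the first bundle found, which is presumably the intent, could look like this sketch (same signature, fallback chain illustrative):

    from typing import Callable, Optional

    from rid_lib import RID
    from rid_lib.ext import Bundle

    def build_dereferencer(
        *funcs: Callable[[RID], Optional[Bundle]]
    ) -> Callable[[RID], Optional[Bundle]]:
        def first_of(rid: RID) -> Optional[Bundle]:
            # Try each source in order, returning the first bundle found.
            for f in funcs:
                bundle = f(rid)
                if bundle is not None:
                    return bundle
            return None
        return first_of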
- """ - - logger.debug(f"Initiating handshake with {target}") - self.ctx.event_queue.push_event_to( - Event.from_rid( - event_type=EventType.FORGET, - rid=self.ctx.identity.rid), - node=target - ) - - self.ctx.event_queue.push_event_to( - event=Event.from_bundle( - event_type=EventType.NEW, - bundle=self.ctx.cache.read(self.ctx.identity.rid)), - node=target - ) - - self.ctx.event_queue.flush_webhook_queue(target) - - def identify_coordinators(self) -> list[KoiNetNode]: - """Returns node's providing state for `orn:koi-net.node`.""" - return self.ctx.resolver.get_state_providers(KoiNetNode) - - def catch_up_with(self, target: KoiNetNode, rid_types: list[RIDType] = []): - """Fetches and processes knowledge objects from target node. - - Args: - target: Node to catch up with - rid_types: RID types to fetch from target (all types if list is empty) - """ - logger.debug(f"catching up with {target} on {rid_types or 'all types'}") - - payload = self.ctx.request_handler.fetch_manifests( - node=target, - rid_types=rid_types - ) - if type(payload) == ErrorResponse: - logger.debug("failed to reach node") - return - - for manifest in payload.manifests: - if manifest.rid == self.ctx.identity.rid: - continue - - self.ctx.handle( - manifest=manifest, - source=target - ) \ No newline at end of file diff --git a/src/koi_net/behaviors.py b/src/koi_net/behaviors.py new file mode 100644 index 0000000..c835aea --- /dev/null +++ b/src/koi_net/behaviors.py @@ -0,0 +1,80 @@ +from logging import getLogger +from rid_lib.ext import Cache +from rid_lib.types import KoiNetNode +from rid_lib import RIDType +from koi_net.identity import NodeIdentity +from koi_net.network.event_queue import EventQueue +from koi_net.network.request_handler import RequestHandler +from koi_net.network.resolver import NetworkResolver +from koi_net.processor.kobj_queue import KobjQueue +from koi_net.protocol.api_models import ErrorResponse +from .protocol.event import Event, EventType + + +logger = getLogger(__name__) + + +def handshake_with( + cache: Cache, + identity: NodeIdentity, + event_queue: EventQueue, + target: KoiNetNode +): + """Initiates a handshake with target node. + + Pushes successive `FORGET` and `NEW` events to target node to + reset the target's cache in case it already knew this node. + """ + + logger.debug(f"Initiating handshake with {target}") + event_queue.push_event_to( + Event.from_rid( + event_type=EventType.FORGET, + rid=identity.rid), + target=target + ) + + event_queue.push_event_to( + event=Event.from_bundle( + event_type=EventType.NEW, + bundle=cache.read(identity.rid)), + target=target + ) + + # self.ctx.event_queue.flush_webhook_queue(target) + +def identify_coordinators(resolver: NetworkResolver) -> list[KoiNetNode]: + """Returns node's providing state for `orn:koi-net.node`.""" + return resolver.get_state_providers(KoiNetNode) + +def catch_up_with( + request_handler: RequestHandler, + identity: NodeIdentity, + kobj_queue: KobjQueue, + target: KoiNetNode, + rid_types: list[RIDType] = [] +): + """Fetches and processes knowledge objects from target node. 
+ + Args: + target: Node to catch up with + rid_types: RID types to fetch from target (all types if list is empty) + """ + logger.debug(f"catching up with {target} on {rid_types or 'all types'}") + + payload = request_handler.fetch_manifests( + node=target, + rid_types=rid_types + ) + if type(payload) == ErrorResponse: + logger.debug("failed to reach node") + return + + for manifest in payload.manifests: + if manifest.rid == identity.rid: + continue + + kobj_queue.put_kobj( + manifest=manifest, + source=target + ) \ No newline at end of file diff --git a/src/koi_net/context.py b/src/koi_net/context.py index 88ccadb..237d551 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -3,10 +3,10 @@ from koi_net.network.resolver import NetworkResolver from .config import NodeConfig from .network.graph import NetworkGraph -from .network.event_queue import NetworkEventQueue +from .network.event_queue import EventQueue from .network.request_handler import RequestHandler from .identity import NodeIdentity -from .processor.interface import ProcessorInterface +from .processor.kobj_queue import KobjQueue class ActionContext: @@ -27,18 +27,18 @@ class HandlerContext: identity: NodeIdentity config: NodeConfig cache: Cache - event_queue: NetworkEventQueue + event_queue: EventQueue graph: NetworkGraph request_handler: RequestHandler resolver: NetworkResolver - _processor: ProcessorInterface | None + _processor: KobjQueue | None def __init__( self, identity: NodeIdentity, config: NodeConfig, cache: Cache, - event_queue: NetworkEventQueue, + event_queue: EventQueue, graph: NetworkGraph, request_handler: RequestHandler, resolver: NetworkResolver, @@ -52,9 +52,9 @@ def __init__( self.resolver = resolver self._processor = None - def set_processor(self, processor: ProcessorInterface): + def set_processor(self, processor: KobjQueue): self._processor = processor @property def handle(self): - return self._processor.handle \ No newline at end of file + return self._processor.put_kobj \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index cce75dc..4f50c68 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,20 +1,21 @@ import logging -from typing import Generic, TypeVar -from dependency_injector.providers import Factory, Self, Dependency, Callable, List, Object -from dependency_injector.containers import DeclarativeContainer +from dependency_injector.providers import Factory, Callable, List, Object, Singleton +from dependency_injector.containers import DeclarativeContainer, WiringConfiguration from rid_lib.ext import Cache from koi_net.cache_adapter import CacheProvider +from koi_net.poll_event_buffer import PollEventBuffer +from koi_net.processor.event_worker import EventProcessingWorker +from koi_net.kobj_worker import KnowledgeProcessingWorker from .network.resolver import NetworkResolver -from .network.event_queue import NetworkEventQueue +from .network.event_queue import EventQueue from .network.graph import NetworkGraph from .network.request_handler import RequestHandler from .network.response_handler import ResponseHandler from .network.error_handler import ErrorHandler -from .actor import Actor -from .processor.interface import ProcessorInterface +from .processor.kobj_queue import KobjQueue from .processor import default_handlers from .processor.handler import KnowledgeHandler from .processor.knowledge_pipeline import KnowledgePipeline @@ -27,6 +28,8 @@ from .lifecycle import NodeLifecycle from .poller import NodePoller from . 
import default_actions +from . import behaviors +from .behaviors import handshake_with, identify_coordinators, catch_up_with logger = logging.getLogger(__name__) @@ -44,21 +47,30 @@ class NodeContainer(DeclarativeContainer): passing new class implementations into `__init__`. """ - config = Factory( + wiring_config = WiringConfiguration( + modules=["koi_net.behaviors"] + ) + + poll_event_buf = Singleton(PollEventBuffer) + + kobj_queue = Singleton(KobjQueue) + event_queue = Singleton(EventQueue) + + config = Singleton( NodeConfig.load_from_yaml ) - cache = Factory( + cache = Singleton( CacheProvider, config=config ) - identity = Factory( + identity = Singleton( NodeIdentity, config=config ) - # effector = Factory( + # effector = Singleton( # Effector, # cache=cache, # resolver=Self, @@ -66,32 +78,32 @@ class NodeContainer(DeclarativeContainer): # action_context=Self # ) - graph = Factory( + graph = Singleton( NetworkGraph, cache=cache, identity=identity ) - secure = Factory( + secure = Singleton( Secure, identity=identity, cache=cache, config=config ) - request_handler = Factory( + request_handler = Singleton( RequestHandler, cache=cache, identity=identity, secure=secure ) - response_handler = Factory( + response_handler = Singleton( ResponseHandler, cache=cache ) - resolver = Factory( + resolver = Singleton( NetworkResolver, config=config, cache=cache, @@ -99,16 +111,6 @@ class NodeContainer(DeclarativeContainer): graph=graph, request_handler=request_handler ) - - event_queue = Factory( - NetworkEventQueue, - config=config, - cache=cache, - identity=identity, - graph=graph, - request_handler=request_handler - ) - knowledge_handlers = List( Object(default_handlers.basic_rid_handler), @@ -120,13 +122,13 @@ class NodeContainer(DeclarativeContainer): Object(default_handlers.forget_edge_on_node_deletion) ) - # action_context = Factory( + # action_context = Singleton( # ActionContext, # identity=identity, # cache=cache # ) - handler_context = Factory( + handler_context = Singleton( HandlerContext, identity=identity, config=config, @@ -137,12 +139,12 @@ class NodeContainer(DeclarativeContainer): resolver=resolver ) - actor = Factory( - Actor, - ctx=handler_context - ) + # actor = Singleton( + # Actor, + # ctx=handler_context + # ) - pipeline = Factory( + pipeline = Singleton( KnowledgePipeline, handler_context=handler_context, cache=cache, @@ -152,42 +154,74 @@ class NodeContainer(DeclarativeContainer): default_handlers=knowledge_handlers ) - processor = Factory( - ProcessorInterface, - pipeline=pipeline, - use_kobj_processor_thread=True # resolve this with to implementations? 
+ kobj_worker = Singleton( + KnowledgeProcessingWorker, + kobj_queue=kobj_queue, + pipeline=pipeline ) - error_handler = Factory( + event_worker = Singleton( + EventProcessingWorker, + config=config, + cache=cache, + event_queue=event_queue, + request_handler=request_handler, + poll_event_buf=poll_event_buf + ) + + handshake_with = Callable( + handshake_with, + cache=cache, + identity=identity, + event_queue=event_queue + ) + + identify_coordinators = Callable( + identify_coordinators, + resolver=resolver + ) + + catch_up_with = Callable( + catch_up_with, + request_handler=request_handler, + identity=identity, + kobj_queue=kobj_queue + ) + + error_handler = Singleton( ErrorHandler, - processor=processor, - actor=actor + kobj_queue=kobj_queue, + handshake_with=handshake_with ) - lifecycle = Factory( + lifecycle = Singleton( NodeLifecycle, config=config, identity=identity, graph=graph, - processor=processor, + kobj_queue=kobj_queue, + kobj_worker=kobj_worker, + event_queue=event_queue, + event_worker=event_worker, cache=cache, - actor=actor, - use_kobj_processor_thread=True + handshake_with=handshake_with, + catch_up_with=catch_up_with, + identify_coordinators=identify_coordinators ) - server = Factory( + server = Singleton( NodeServer, config=config, lifecycle=lifecycle, secure=secure, - processor=processor, - event_queue=event_queue, - response_handler=response_handler + kobj_queue=kobj_queue, + response_handler=response_handler, + poll_event_buf=poll_event_buf ) - poller = Factory( + poller = Singleton( NodePoller, - processor=processor, + kobj_queue=kobj_queue, lifecycle=lifecycle, resolver=resolver, config=config diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index 8e45f08..9eb1f44 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -9,7 +9,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from .processor.interface import ProcessorInterface + from .processor.kobj_queue import KobjQueue from .context import ActionContext logger = logging.getLogger(__name__) @@ -24,7 +24,7 @@ class Effector: cache: Cache resolver: NetworkResolver - processor: "ProcessorInterface | None" + processor: "KobjQueue | None" action_context: "ActionContext | None" _action_table: dict[ type[RID], @@ -38,7 +38,7 @@ def __init__( self, cache: Cache, resolver: "NetworkResolver", - processor: "ProcessorInterface", + processor: "KobjQueue", action_context: "ActionContext" ): self.cache = cache @@ -156,7 +156,7 @@ def deref( and bundle is not None and source != BundleSource.CACHE ): - self.processor.handle( + self.processor.put_kobj( bundle=bundle, source=source if type(source) is KoiNetNode else None ) diff --git a/src/koi_net/kobj_worker.py b/src/koi_net/kobj_worker.py new file mode 100644 index 0000000..f52a2fd --- /dev/null +++ b/src/koi_net/kobj_worker.py @@ -0,0 +1,45 @@ +import queue +import traceback +import logging + +from koi_net.models import END +from koi_net.processor.knowledge_pipeline import KnowledgePipeline +from koi_net.processor.kobj_queue import KobjQueue +from koi_net.worker import ThreadWorker + +logger = logging.getLogger(__name__) + + +class KnowledgeProcessingWorker(ThreadWorker): + def __init__( + self, + kobj_queue: KobjQueue, + pipeline: KnowledgePipeline, + timeout: float = 0.1 + ): + self.kobj_queue = kobj_queue + self.pipeline = pipeline + self.timeout = timeout + super().__init__() + + def run(self): + logger.info("Started kobj worker") + while True: + try: + item = self.kobj_queue.q.get(timeout=self.timeout) + try: + if item is END: + 
logger.info("Received 'END' signal, shutting down...") + return + + logger.info(f"Dequeued {item!r}") + + self.pipeline.process(item) + finally: + self.kobj_queue.q.task_done() + + except queue.Empty: + pass + + except Exception as e: + traceback.print_exc() \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 988de2d..ca2c09d 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -1,12 +1,17 @@ import logging from contextlib import contextmanager, asynccontextmanager +from typing import Callable from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode -from .actor import Actor +from koi_net.kobj_worker import KnowledgeProcessingWorker +from koi_net.models import END +from koi_net.network.event_queue import EventQueue +from koi_net.processor.event_worker import EventProcessingWorker + from .config import NodeConfig -from .processor.interface import ProcessorInterface +from .processor.kobj_queue import KobjQueue from .network.graph import NetworkGraph from .identity import NodeIdentity @@ -19,28 +24,38 @@ class NodeLifecycle: config: NodeConfig identity: NodeIdentity graph: NetworkGraph - processor: ProcessorInterface + kobj_queue: KobjQueue + kobj_worker: KnowledgeProcessingWorker + event_queue: EventQueue + event_worker: EventProcessingWorker cache: Cache - actor: Actor - use_kobj_processor_thread: bool def __init__( self, config: NodeConfig, identity: NodeIdentity, graph: NetworkGraph, - processor: ProcessorInterface, + kobj_queue: KobjQueue, + kobj_worker: KnowledgeProcessingWorker, + event_queue: EventQueue, + event_worker: EventProcessingWorker, cache: Cache, - actor: Actor, - use_kobj_processor_thread: bool + handshake_with: Callable, + catch_up_with: Callable, + identify_coordinators: Callable ): self.config = config self.identity = identity self.graph = graph - self.processor = processor + self.kobj_queue = kobj_queue + self.kobj_worker = kobj_worker + self.event_queue = event_queue + self.event_worker = event_worker self.cache = cache - self.actor = actor - self.use_kobj_processor_thread = use_kobj_processor_thread + + self.handshake_with = handshake_with + self.catch_up_with = catch_up_with + self.identify_coordinators = identify_coordinators @contextmanager def run(self): @@ -76,33 +91,32 @@ def start(self): of node bundle. Initiates handshake with first contact if node doesn't have any neighbors. Catches up with coordinator state. 
""" - if self.use_kobj_processor_thread: - logger.info("Starting processor worker thread") - self.processor.worker_thread.start() + logger.info("Starting processor worker thread") + self.kobj_worker.thread.start() + self.event_worker.thread.start() self.graph.generate() # refresh to reflect changes (if any) in config.yaml - self.processor.handle(bundle=Bundle.generate( + self.kobj_queue.put_kobj(bundle=Bundle.generate( rid=self.identity.rid, contents=self.identity.profile.model_dump() )) logger.debug("Waiting for kobj queue to empty") - if self.use_kobj_processor_thread: - self.processor.kobj_queue.join() - else: - self.processor.flush_kobj_queue() - logger.debug("Done") - + + # TODO: REFACTOR + self.kobj_queue.q.join() + + # TODO: FACTOR OUT BEHAVIOR if not self.graph.get_neighbors() and self.config.koi_net.first_contact.rid: logger.debug(f"I don't have any neighbors, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") - self.actor.handshake_with(self.config.koi_net.first_contact.rid) + self.handshake_with(self.config.koi_net.first_contact.rid) - for coordinator in self.actor.identify_coordinators(): - self.actor.catch_up_with(coordinator, rid_types=[KoiNetNode]) + for coordinator in self.identify_coordinators(): + self.catch_up_with(coordinator, rid_types=[KoiNetNode]) def stop(self): @@ -110,8 +124,7 @@ def stop(self): Finishes processing knowledge object queue. """ - if self.use_kobj_processor_thread: - logger.info(f"Waiting for kobj queue to empty ({self.processor.kobj_queue.unfinished_tasks} tasks remaining)") - self.processor.kobj_queue.join() - else: - self.processor.flush_kobj_queue() \ No newline at end of file + logger.info(f"Waiting for kobj queue to empty ({self.kobj_queue.q.unfinished_tasks} tasks remaining)") + + self.kobj_queue.q.put(END) + self.event_queue.q.put(END) \ No newline at end of file diff --git a/src/koi_net/models.py b/src/koi_net/models.py new file mode 100644 index 0000000..b5e29b4 --- /dev/null +++ b/src/koi_net/models.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel +from rid_lib.types import KoiNetNode +from koi_net.protocol.event import Event + +class End: + """Class for a sentinel value by knowledge handlers.""" + pass + +END = End() + + +class QueuedEvent(BaseModel): + event: Event + target: KoiNetNode \ No newline at end of file diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index fd7cae5..b9f1e3d 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -1,9 +1,9 @@ from logging import getLogger +from typing import Callable from koi_net.protocol.errors import ErrorType from koi_net.protocol.event import EventType from rid_lib.types import KoiNetNode -from ..processor.interface import ProcessorInterface -from ..actor import Actor +from ..processor.kobj_queue import KobjQueue logger = getLogger(__name__) @@ -11,16 +11,15 @@ class ErrorHandler: """Handles network errors that may occur during requests.""" timeout_counter: dict[KoiNetNode, int] - processor: ProcessorInterface - actor: Actor + kobj_queue: KobjQueue def __init__( self, - processor: ProcessorInterface, - actor: Actor + kobj_queue: KobjQueue, + handshake_with: Callable ): - self.processor = processor - self.actor = actor + self.kobj_queue = kobj_queue + self.handshake_with = handshake_with self.timeout_counter = {} def handle_connection_error(self, node: KoiNetNode): @@ -32,7 +31,7 @@ def handle_connection_error(self, node: KoiNetNode): if self.timeout_counter[node] > 3: 
logger.debug(f"Exceeded time out limit, forgetting node") - self.processor.handle(rid=node, event_type=EventType.FORGET) + self.kobj_queue.put_kobj(rid=node, event_type=EventType.FORGET) # do something @@ -46,7 +45,7 @@ def handle_protocol_error( match error_type: case ErrorType.UnknownNode: logger.info("Peer doesn't know me, attempting handshake...") - self.actor.handshake_with(node) + self.handshake_with(node) case ErrorType.InvalidKey: ... case ErrorType.InvalidSignature: ... diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index d85c1d1..cdd5aeb 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -1,97 +1,22 @@ import logging from queue import Queue -import httpx -from pydantic import BaseModel -from rid_lib import RID -from rid_lib.ext import Cache + from rid_lib.types import KoiNetNode -from .graph import NetworkGraph -from .request_handler import NodeNotFoundError, RequestHandler -from ..protocol.node import NodeProfile, NodeType -from ..protocol.edge import EdgeProfile, EdgeType +from ..models import QueuedEvent from ..protocol.event import Event -from ..identity import NodeIdentity -from ..config import NodeConfig logger = logging.getLogger(__name__) -class EventQueueModel(BaseModel): - webhook: dict[KoiNetNode, list[Event]] - poll: dict[KoiNetNode, list[Event]] - -type EventQueue = dict[RID, Queue[Event]] - -class NetworkEventQueue: +class EventQueue: """Handles out going network event queues.""" + q: Queue[QueuedEvent] - config: NodeConfig - identity: NodeIdentity - cache: Cache - graph: NetworkGraph - request_handler: RequestHandler - poll_event_queue: EventQueue - webhook_event_queue: EventQueue - - def __init__( - self, - config: NodeConfig, - identity: NodeIdentity, - cache: Cache, - graph: NetworkGraph, - request_handler: RequestHandler, - ): - self.config = config - self.identity = identity - self.graph = graph - self.request_handler = request_handler - self.cache = cache - - self.poll_event_queue = dict() - self.webhook_event_queue = dict() - - def _load_event_queues(self): - """Loads event queues from storage.""" - try: - with open(self.config.koi_net.event_queues_path, "r") as f: - queues = EventQueueModel.model_validate_json(f.read()) - - for node in queues.poll.keys(): - for event in queues.poll[node]: - queue = self.poll_event_queue.setdefault(node, Queue()) - queue.put(event) - - for node in queues.webhook.keys(): - for event in queues.webhook[node]: - queue = self.webhook_event_queue.setdefault(node, Queue()) - queue.put(event) - - except FileNotFoundError: - return - - def _save_event_queues(self): - """Saves event queues to storage.""" - events_model = EventQueueModel( - poll={ - node: list(queue.queue) - for node, queue in self.poll_event_queue.items() - if not queue.empty() - }, - webhook={ - node: list(queue.queue) - for node, queue in self.webhook_event_queue.items() - if not queue.empty() - } - ) - - if len(events_model.poll) == 0 and len(events_model.webhook) == 0: - return - - with open(self.config.koi_net.event_queues_path, "w") as f: - f.write(events_model.model_dump_json(indent=2)) + def __init__(self): + self.q = Queue() - def push_event_to(self, event: Event, node: KoiNetNode, flush=False): + def push_event_to(self, event: Event, target: KoiNetNode): """Pushes event to queue of specified node. 
Event will be sent to webhook or poll queue depending on the @@ -99,102 +24,6 @@ def push_event_to(self, event: Event, node: KoiNetNode, flush=False): to `True`, the webhook queued will be flushed after pushing the event. """ - logger.debug(f"Pushing event {event.event_type} {event.rid!r} to {node}") - - node_bundle = self.cache.read(node) - - # if there's an edge from me to the target node, override broadcast type - edge_rid = self.graph.get_edge( - source=self.identity.rid, - target=node - ) - - edge_bundle = self.cache.read(edge_rid) if edge_rid else None - - if edge_bundle: - logger.debug(f"Found edge from me to {node!r}") - edge_profile = edge_bundle.validate_contents(EdgeProfile) - if edge_profile.edge_type == EdgeType.WEBHOOK: - event_queue = self.webhook_event_queue - elif edge_profile.edge_type == EdgeType.POLL: - event_queue = self.poll_event_queue - - elif node_bundle: - logger.debug(f"Found bundle for {node!r}") - node_profile = node_bundle.validate_contents(NodeProfile) - if node_profile.node_type == NodeType.FULL: - event_queue = self.webhook_event_queue - elif node_profile.node_type == NodeType.PARTIAL: - event_queue = self.poll_event_queue - - elif node == self.config.koi_net.first_contact.rid: - logger.debug(f"Node {node!r} is my first contact") - # first contact node is always a webhook node - event_queue = self.webhook_event_queue - - else: - logger.warning(f"Node {node!r} unknown to me") - return - - queue = event_queue.setdefault(node, Queue()) - queue.put(event) - - if flush and event_queue is self.webhook_event_queue: - self.flush_webhook_queue(node) - - def _flush_queue(self, event_queue: EventQueue, node: KoiNetNode) -> list[Event]: - """Flushes a node's queue, returning list of events.""" - queue = event_queue.get(node) - events = list() - if queue: - while not queue.empty(): - event = queue.get() - logger.debug(f"Dequeued {event.event_type} {event.rid!r}") - events.append(event) - - return events - - def flush_poll_queue(self, node: KoiNetNode) -> list[Event]: - """Flushes a node's poll queue, returning list of events.""" - logger.debug(f"Flushing poll queue for {node}") - return self._flush_queue(self.poll_event_queue, node) - - def flush_webhook_queue(self, node: KoiNetNode, requeue_on_fail: bool = True): - """Flushes a node's webhook queue, and broadcasts events. - - If node profile is unknown, or node type is not `FULL`, this - operation will fail silently. If 'requeue_on_fail` is `True` and - the remote node cannot be reached, all events will be requeued. 
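To make the new shape concrete: push_event_to now only records a (target, event) pair; routing and delivery are decided later by the event worker. A small usage sketch, assuming event and peer are constructed elsewhere:

from rid_lib.types import KoiNetNode
from koi_net.network.event_queue import EventQueue
from koi_net.protocol.event import Event

def enqueue_for(eq: EventQueue, event: Event, peer: KoiNetNode) -> None:
    # Producer side (pipeline, behaviors): just record where the event should go.
    eq.push_event_to(event, target=peer)
    # Consumer side (EventProcessingWorker): dequeue the QueuedEvent and choose
    # webhook vs. poll delivery from the target's node profile.
    queued = eq.q.get()
    print(queued.target, queued.event.event_type)
    eq.q.task_done()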
- """ - - logger.debug(f"Flushing webhook queue for {node}") - # node_bundle = self.effector.deref(node) - - # if not node_bundle: - # logger.warning(f"{node!r} not found") - # return - - # node_profile = node_bundle.validate_contents(NodeProfile) - - # if node_profile.node_type != NodeType.FULL: - # logger.warning(f"{node!r} is a partial node!") - # return - - events = self._flush_queue(self.webhook_event_queue, node) - if not events: return - - logger.debug(f"Broadcasting {len(events)} events") - - try: - self.request_handler.broadcast_events(node, events=events) - - except NodeNotFoundError: - logger.warning("Broadcast failed (node not found)") - - except httpx.ConnectError: - logger.warning("Broadcast failed (couldn't connect)") - - if requeue_on_fail: - for event in events: - self.push_event_to(event, node) \ No newline at end of file + self.q.put(QueuedEvent(target=target, event=event)) + \ No newline at end of file diff --git a/src/koi_net/poll_event_buffer.py b/src/koi_net/poll_event_buffer.py new file mode 100644 index 0000000..cd83997 --- /dev/null +++ b/src/koi_net/poll_event_buffer.py @@ -0,0 +1,17 @@ +from rid_lib.types import KoiNetNode + +from koi_net.protocol.event import Event + + +class PollEventBuffer: + buffers: dict[KoiNetNode, list[Event]] + + def __init__(self): + self.buffers = dict() + + def put(self, node: KoiNetNode, event: Event): + event_buf = self.buffers.setdefault(node, []) + event_buf.append(event) + + def flush(self, node: KoiNetNode): + return self.buffers.pop(node, []) \ No newline at end of file diff --git a/src/koi_net/poller.py b/src/koi_net/poller.py index 2ff54e4..eab03aa 100644 --- a/src/koi_net/poller.py +++ b/src/koi_net/poller.py @@ -1,7 +1,7 @@ import time import logging -from .processor.interface import ProcessorInterface +from .processor.kobj_queue import KobjQueue from .lifecycle import NodeLifecycle from .network.resolver import NetworkResolver from .config import NodeConfig @@ -11,19 +11,19 @@ class NodePoller: """Manages polling based event loop for partial nodes.""" - processor: ProcessorInterface + kobj_queue: KobjQueue lifecycle: NodeLifecycle resolver: NetworkResolver config: NodeConfig def __init__( self, - processor: ProcessorInterface, + kobj_queue: KobjQueue, lifecycle: NodeLifecycle, resolver: NetworkResolver, config: NodeConfig ): - self.processor = processor + self.kobj_queue = kobj_queue self.lifecycle = lifecycle self.resolver = resolver self.config = config @@ -33,8 +33,7 @@ def poll(self): neighbors = self.resolver.poll_neighbors() for node_rid in neighbors: for event in neighbors[node_rid]: - self.processor.handle(event=event, source=node_rid) - self.processor.flush_kobj_queue() + self.kobj_queue.put_kobj(event=event, source=node_rid) def run(self): """Runs polling event loop.""" diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py new file mode 100644 index 0000000..237848e --- /dev/null +++ b/src/koi_net/processor/event_worker.py @@ -0,0 +1,119 @@ +import queue +import traceback +import time +import logging + +from rid_lib.ext import Cache +from rid_lib.types import KoiNetNode + +from koi_net.config import NodeConfig +from koi_net.models import END, QueuedEvent +from koi_net.network.event_queue import EventQueue +from koi_net.network.request_handler import RequestHandler +from koi_net.poll_event_buffer import PollEventBuffer +from koi_net.protocol.event import Event +from koi_net.protocol.node import NodeProfile, NodeType +from koi_net.worker import ThreadWorker + +logger = 
logging.getLogger(__name__) + + +class EventProcessingWorker(ThreadWorker): + event_buffer: dict[KoiNetNode, list[Event]] + buffer_times: dict[KoiNetNode, float] + + def __init__( + self, + event_queue: EventQueue, + request_handler: RequestHandler, + config: NodeConfig, + cache: Cache, + poll_event_buf: PollEventBuffer, + queue_timeout: float = 0.1, + max_buf_len: int = 5, + max_wait_time: float = 10.0 + ): + self.event_queue = event_queue + self.request_handler = request_handler + + self.config = config + self.cache = cache + self.poll_event_buf = poll_event_buf + + self.timeout = queue_timeout + self.max_buf_len = max_buf_len + self.max_wait_time = max_wait_time + + self.event_buffer = dict() + self.buffer_times = dict() + + super().__init__() + + def flush_buffer(self, target: KoiNetNode, buffer: list[Event]): + try: + self.request_handler.broadcast_events(target, events=buffer) + except Exception as e: + traceback.print_exc() + + self.event_buffer[target] = [] + self.buffer_times[target] = None + + def decide_event(self, item: QueuedEvent) -> bool: + node_bundle = self.cache.read(item.target) + if node_bundle: + node_profile = node_bundle.validate_contents(NodeProfile) + + if node_profile.node_type == NodeType.FULL: + return True + + elif node_profile.node_type == NodeType.PARTIAL: + self.poll_event_buf.put(item.target, item.event) + return False + + elif item.target == self.config.koi_net.first_contact.rid: + return True + + else: + logger.warning(f"Couldn't handle event {item.event!r} in queue, node {item.target!r} unknown to me") + return False + + + def run(self): + logger.info("Started event worker") + while True: + now = time.time() + try: + item = self.event_queue.q.get(timeout=self.timeout) + + try: + if item is END: + logger.info("Received 'END' signal, shutting down...") + return + + logger.info(f"Dequeued {item.event!r} -> {item.target!r}") + + if not self.decide_event(item): + continue + + event_buf = self.event_buffer.setdefault(item.target, []) + if not event_buf: + self.buffer_times[item.target] = now + + event_buf.append(item.event) + + # When new events are dequeued, check buffer for max length + if len(event_buf) >= self.max_buf_len: + self.flush_buffer(item.target, event_buf) + finally: + self.event_queue.q.task_done() + + except queue.Empty: + # On timeout, check all buffers for max wait time + for target, event_buf in self.event_buffer.items(): + if (len(event_buf) == 0) or (self.buffer_times.get(target) is None): + continue + if (now - self.buffer_times[target]) >= self.max_wait_time: + self.flush_buffer(target, event_buf) + + except Exception as e: + traceback.print_exc() \ No newline at end of file diff --git a/src/koi_net/processor/interface.py b/src/koi_net/processor/interface.py deleted file mode 100644 index a23b81f..0000000 --- a/src/koi_net/processor/interface.py +++ /dev/null @@ -1,106 +0,0 @@ -import logging -import queue -import threading -from rid_lib.core import RID -from rid_lib.ext import Bundle, Manifest -from rid_lib.types import KoiNetNode -from ..protocol.event import Event, EventType -from .knowledge_object import KnowledgeObject -from .knowledge_pipeline import KnowledgePipeline - - -logger = logging.getLogger(__name__) - - -class ProcessorInterface: - """Provides access to this node's knowledge processing pipeline.""" - pipeline: KnowledgePipeline - kobj_queue: queue.Queue[KnowledgeObject] - use_kobj_processor_thread: bool - worker_thread: threading.Thread | None = None - - def __init__( - self, - pipeline: KnowledgePipeline, - 
use_kobj_processor_thread: bool, - ): - self.pipeline = pipeline - self.use_kobj_processor_thread = use_kobj_processor_thread - self.kobj_queue = queue.Queue() - - if self.use_kobj_processor_thread: - self.worker_thread = threading.Thread( - target=self.kobj_processor_worker, - daemon=True - ) - - def flush_kobj_queue(self): - """Flushes all knowledge objects from queue and processes them. - - NOTE: ONLY CALL THIS METHOD IN SINGLE THREADED NODES, OTHERWISE - THIS WILL CAUSE RACE CONDITIONS. - """ - if self.use_kobj_processor_thread: - logger.warning("You are using a worker thread, calling this method can cause race conditions!") - - while not self.kobj_queue.empty(): - kobj = self.kobj_queue.get() - logger.debug(f"Dequeued {kobj!r}") - - try: - self.pipeline.process(kobj) - finally: - self.kobj_queue.task_done() - logger.debug("Done") - - def kobj_processor_worker(self, timeout=0.1): - while True: - try: - kobj = self.kobj_queue.get(timeout=timeout) - logger.debug(f"Dequeued {kobj!r}") - - try: - self.pipeline.process(kobj) - finally: - self.kobj_queue.task_done() - logger.debug("Done") - - except queue.Empty: - pass - - except Exception as e: - logger.warning(f"Error processing kobj: {e}") - - def handle( - self, - rid: RID | None = None, - manifest: Manifest | None = None, - bundle: Bundle | None = None, - event: Event | None = None, - kobj: KnowledgeObject | None = None, - event_type: EventType | None = None, - source: KoiNetNode | None = None - ): - """Queues knowledge object to be handled by processing pipeline. - - Knowledge may take the form of an RID, manifest, bundle, event, - or knowledge object (with an optional event type for RID, - manifest, or bundle objects). All objects will be normalized - to knowledge objects and queued. If `flush` is `True`, the queue - will be flushed immediately after adding the new knowledge. 
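Back on the EventProcessingWorker above: outgoing events are buffered per target and flushed either when a buffer reaches max_buf_len or when a queue-timeout tick finds a buffer older than max_wait_time, while PARTIAL targets are diverted into the PollEventBuffer instead. A standalone sketch of just the batching rule (defaults taken from the diff; flush stands in for broadcast_events):

import time
from typing import Callable

MAX_BUF_LEN = 5        # default max_buf_len
MAX_WAIT_TIME = 10.0   # default max_wait_time, seconds

buffers: dict[str, list[str]] = {}
started: dict[str, float | None] = {}

def push(target: str, event: str, flush: Callable[[str, list[str]], None]) -> None:
    buf = buffers.setdefault(target, [])
    if not buf:
        started[target] = time.time()  # first buffered event starts the clock
    buf.append(event)
    if len(buf) >= MAX_BUF_LEN:        # size-based flush
        flush(target, buf.copy())
        buf.clear()
        started[target] = None

def tick(flush: Callable[[str, list[str]], None]) -> None:
    # Run on every queue timeout: age-based flush for stragglers.
    now = time.time()
    for target, buf in buffers.items():
        t0 = started.get(target)
        if buf and t0 is not None and now - t0 >= MAX_WAIT_TIME:
            flush(target, buf.copy())
            buf.clear()
            started[target] = None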
- """ - if rid: - _kobj = KnowledgeObject.from_rid(rid, event_type, source) - elif manifest: - _kobj = KnowledgeObject.from_manifest(manifest, event_type, source) - elif bundle: - _kobj = KnowledgeObject.from_bundle(bundle, event_type, source) - elif event: - _kobj = KnowledgeObject.from_event(event, source) - elif kobj: - _kobj = kobj - else: - raise ValueError("One of 'rid', 'manifest', 'bundle', 'event', or 'kobj' must be provided") - - self.kobj_queue.put(_kobj) - logger.debug(f"Queued {_kobj!r}") diff --git a/src/koi_net/processor/knowledge_pipeline.py b/src/koi_net/processor/knowledge_pipeline.py index cd0d0af..2de7e27 100644 --- a/src/koi_net/processor/knowledge_pipeline.py +++ b/src/koi_net/processor/knowledge_pipeline.py @@ -5,7 +5,7 @@ from rid_lib.ext import Cache from ..protocol.event import EventType from ..network.request_handler import RequestHandler -from ..network.event_queue import NetworkEventQueue +from ..network.event_queue import EventQueue from ..network.graph import NetworkGraph from ..identity import NodeIdentity from .handler import ( @@ -28,7 +28,7 @@ class KnowledgePipeline: cache: Cache identity: NodeIdentity request_handler: RequestHandler - event_queue: NetworkEventQueue + event_queue: EventQueue graph: NetworkGraph handlers: list[KnowledgeHandler] @@ -37,7 +37,7 @@ def __init__( handler_context: "HandlerContext", cache: Cache, request_handler: RequestHandler, - event_queue: NetworkEventQueue, + event_queue: EventQueue, graph: NetworkGraph, default_handlers: list[KnowledgeHandler] = [] ): diff --git a/src/koi_net/processor/kobj_queue.py b/src/koi_net/processor/kobj_queue.py new file mode 100644 index 0000000..be6ba10 --- /dev/null +++ b/src/koi_net/processor/kobj_queue.py @@ -0,0 +1,51 @@ +import logging +from queue import Queue +from rid_lib.core import RID +from rid_lib.ext import Bundle, Manifest +from rid_lib.types import KoiNetNode +from ..protocol.event import Event, EventType +from .knowledge_object import KnowledgeObject + +logger = logging.getLogger(__name__) + + +class KobjQueue: + """Provides access to this node's knowledge processing pipeline.""" + q: Queue[KnowledgeObject] + + def __init__(self): + self.q = Queue() + + def put_kobj( + self, + rid: RID | None = None, + manifest: Manifest | None = None, + bundle: Bundle | None = None, + event: Event | None = None, + kobj: KnowledgeObject | None = None, + event_type: EventType | None = None, + source: KoiNetNode | None = None + ): + """Queues knowledge object to be handled by processing pipeline. + + Knowledge may take the form of an RID, manifest, bundle, event, + or knowledge object (with an optional event type for RID, + manifest, or bundle objects). All objects will be normalized + to knowledge objects and queued. If `flush` is `True`, the queue + will be flushed immediately after adding the new knowledge. 
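In practice this KobjQueue is the single entry point for knowledge headed into the pipeline: callers pass exactly one of rid / manifest / bundle / event / kobj and it is normalized into a KnowledgeObject. A usage sketch with placeholder arguments (the caller is assumed to already hold them):

from rid_lib.core import RID
from rid_lib.ext import Bundle
from rid_lib.types import KoiNetNode
from koi_net.processor.kobj_queue import KobjQueue
from koi_net.protocol.event import Event, EventType

def queue_knowledge(kq: KobjQueue, rid: RID, bundle: Bundle, event: Event, peer: KoiNetNode) -> None:
    kq.put_kobj(rid=rid, event_type=EventType.FORGET)  # bare RID with an explicit event type
    kq.put_kobj(bundle=bundle)                         # full bundle, event type optional
    kq.put_kobj(event=event, source=peer)              # event received from a peer node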
+ """ + if rid: + _kobj = KnowledgeObject.from_rid(rid, event_type, source) + elif manifest: + _kobj = KnowledgeObject.from_manifest(manifest, event_type, source) + elif bundle: + _kobj = KnowledgeObject.from_bundle(bundle, event_type, source) + elif event: + _kobj = KnowledgeObject.from_event(event, source) + elif kobj: + _kobj = kobj + else: + raise ValueError("One of 'rid', 'manifest', 'bundle', 'event', or 'kobj' must be provided") + + self.q.put(_kobj) + logger.debug(f"Queued {_kobj!r}") diff --git a/src/koi_net/server.py b/src/koi_net/server.py index 86f0347..b1cfdf4 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/server.py @@ -3,9 +3,10 @@ from contextlib import asynccontextmanager from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse -from .network.event_queue import NetworkEventQueue + +from koi_net.poll_event_buffer import PollEventBuffer from .network.response_handler import ResponseHandler -from .processor.interface import ProcessorInterface +from .processor.kobj_queue import KobjQueue from .protocol.api_models import ( PollEvents, FetchRids, @@ -38,8 +39,8 @@ class NodeServer: config: NodeConfig lifecycle: NodeLifecycle secure: Secure - processor: ProcessorInterface - event_queue: NetworkEventQueue + kobj_queue: KobjQueue + poll_event_buf: PollEventBuffer response_handler: ResponseHandler app: FastAPI router: APIRouter @@ -49,15 +50,15 @@ def __init__( config: NodeConfig, lifecycle: NodeLifecycle, secure: Secure, - processor: ProcessorInterface, - event_queue: NetworkEventQueue, + kobj_queue: KobjQueue, + poll_event_buf: PollEventBuffer, response_handler: ResponseHandler ): self.config = config self.lifecycle = lifecycle self.secure = secure - self.processor = processor - self.event_queue = event_queue + self.kobj_queue = kobj_queue + self.poll_event_buf = poll_event_buf self.response_handler = response_handler self._build_app() @@ -115,14 +116,14 @@ async def broadcast_events(self, req: SignedEnvelope[EventsPayload]): """Handles events broadcast endpoint.""" logger.info(f"Request to {BROADCAST_EVENTS_PATH}, received {len(req.payload.events)} event(s)") for event in req.payload.events: - self.processor.handle(event=event, source=req.source_node) + self.kobj_queue.put_kobj(event=event, source=req.source_node) async def poll_events( self, req: SignedEnvelope[PollEvents] ) -> SignedEnvelope[EventsPayload] | ErrorResponse: """Handles poll events endpoint.""" logger.info(f"Request to {POLL_EVENTS_PATH}") - events = self.event_queue.flush_poll_queue(req.source_node) + events = self.poll_event_buf.flush(req.source_node) return EventsPayload(events=events) async def fetch_rids( diff --git a/src/koi_net/worker.py b/src/koi_net/worker.py new file mode 100644 index 0000000..20394d0 --- /dev/null +++ b/src/koi_net/worker.py @@ -0,0 +1,10 @@ +import threading + +class ThreadWorker: + thread: threading.Thread + + def __init__(self): + self.thread = threading.Thread(target=self.run) + + def run(self): + ... \ No newline at end of file From 7ff30e6e28e2627768b2f9a81412bf8adacf1454 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 23 Sep 2025 16:12:11 -0400 Subject: [PATCH 05/53] factored out dependency injector, done manually with NodeAssembler and NodeContainer classes. 
grouped behaviors back into a class Behaviors (prev Actor) --- examples/partial.py | 12 +- pyproject.toml | 1 - src/koi_net/behaviors.py | 115 +++--- src/koi_net/core.py | 426 ++++++++++---------- src/koi_net/lifecycle.py | 16 +- src/koi_net/network/error_handler.py | 7 +- src/koi_net/processor/knowledge_pipeline.py | 24 +- uv.lock | 28 -- 8 files changed, 276 insertions(+), 353 deletions(-) diff --git a/examples/partial.py b/examples/partial.py index 449e928..a9248e2 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -1,8 +1,8 @@ import logging from pydantic import Field -from dependency_injector.providers import Factory from rich.logging import RichHandler from koi_net import NodeContainer +from koi_net.core import NodeAssembler from koi_net.protocol.node import NodeProfile, NodeType from koi_net.config import NodeConfig, KoiNetConfig @@ -30,12 +30,10 @@ class PartialNodeConfig(NodeConfig): ) ) -class PartialNodeContainer(NodeContainer): - config = Factory( - PartialNodeConfig.load_from_yaml, - "partial_config.yaml" - ) +class PartialNodeAssembler(NodeAssembler): + config = PartialNodeConfig if __name__ == "__main__": - PartialNodeContainer().poller().run() \ No newline at end of file + node = PartialNodeAssembler.create() + node.server.run() \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 17a60f0..94ca887 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,6 @@ dependencies = [ "fastapi>=0.115.12", "uvicorn>=0.34.2", "rich>=14.1.0", - "dependency-injector>=4.48.1", ] [project.optional-dependencies] diff --git a/src/koi_net/behaviors.py b/src/koi_net/behaviors.py index c835aea..a74c717 100644 --- a/src/koi_net/behaviors.py +++ b/src/koi_net/behaviors.py @@ -14,67 +14,58 @@ logger = getLogger(__name__) -def handshake_with( - cache: Cache, - identity: NodeIdentity, - event_queue: EventQueue, - target: KoiNetNode -): - """Initiates a handshake with target node. - - Pushes successive `FORGET` and `NEW` events to target node to - reset the target's cache in case it already knew this node. - """ - - logger.debug(f"Initiating handshake with {target}") - event_queue.push_event_to( - Event.from_rid( - event_type=EventType.FORGET, - rid=identity.rid), - target=target - ) - - event_queue.push_event_to( - event=Event.from_bundle( - event_type=EventType.NEW, - bundle=cache.read(identity.rid)), - target=target - ) - - # self.ctx.event_queue.flush_webhook_queue(target) -def identify_coordinators(resolver: NetworkResolver) -> list[KoiNetNode]: - """Returns node's providing state for `orn:koi-net.node`.""" - return resolver.get_state_providers(KoiNetNode) +class Behaviors: + def __init__(self, cache: Cache, identity: NodeIdentity, event_queue: EventQueue, resolver: NetworkResolver, request_handler: RequestHandler, kobj_queue: KobjQueue): + self.cache = cache + self.identity = identity + self.event_queue = event_queue + self.resolver = resolver + self.request_handler = request_handler + self.kobj_queue = kobj_queue -def catch_up_with( - request_handler: RequestHandler, - identity: NodeIdentity, - kobj_queue: KobjQueue, - target: KoiNetNode, - rid_types: list[RIDType] = [] -): - """Fetches and processes knowledge objects from target node. 
- - Args: - target: Node to catch up with - rid_types: RID types to fetch from target (all types if list is empty) - """ - logger.debug(f"catching up with {target} on {rid_types or 'all types'}") - - payload = request_handler.fetch_manifests( - node=target, - rid_types=rid_types - ) - if type(payload) == ErrorResponse: - logger.debug("failed to reach node") - return - - for manifest in payload.manifests: - if manifest.rid == identity.rid: - continue - - kobj_queue.put_kobj( - manifest=manifest, - source=target - ) \ No newline at end of file + def handshake_with(self, target: KoiNetNode): + """Initiates a handshake with target node. + Pushes successive `FORGET` and `NEW` events to target node to + reset the target's cache in case it already knew this node. + """ + logger.debug(f"Initiating handshake with {target}") + self.event_queue.push_event_to( + Event.from_rid( + event_type=EventType.FORGET, + rid=self.identity.rid), + target=target + ) + self.event_queue.push_event_to( + event=Event.from_bundle( + event_type=EventType.NEW, + bundle=self.cache.read(self.identity.rid)), + target=target + ) + # self.ctx.event_queue.flush_webhook_queue(target) + + def identify_coordinators(self) -> list[KoiNetNode]: + """Returns node's providing state for `orn:koi-net.node`.""" + return self.resolver.get_state_providers(KoiNetNode) + + def catch_up_with(self, target: KoiNetNode, rid_types: list[RIDType] = []): + """Fetches and processes knowledge objects from target node. + Args: + target: Node to catch up with + rid_types: RID types to fetch from target (all types if list is empty) + """ + logger.debug(f"catching up with {target} on {rid_types or 'all types'}") + payload = self.request_handler.fetch_manifests( + node=target, + rid_types=rid_types + ) + if type(payload) == ErrorResponse: + logger.debug("failed to reach node") + return + for manifest in payload.manifests: + if manifest.rid == self.identity.rid: + continue + self.kobj_queue.put_kobj( + manifest=manifest, + source=target + ) \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 4f50c68..bca0261 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,228 +1,210 @@ -import logging -from dependency_injector.providers import Factory, Callable, List, Object, Singleton -from dependency_injector.containers import DeclarativeContainer, WiringConfiguration - +from dataclasses import dataclass from rid_lib.ext import Cache - -from koi_net.cache_adapter import CacheProvider +from koi_net.behaviors import Behaviors +from koi_net.config import NodeConfig +from koi_net.context import HandlerContext +from koi_net.identity import NodeIdentity +from koi_net.kobj_worker import KnowledgeProcessingWorker +from koi_net.lifecycle import NodeLifecycle +from koi_net.network.error_handler import ErrorHandler +from koi_net.network.event_queue import EventQueue +from koi_net.network.graph import NetworkGraph +from koi_net.network.request_handler import RequestHandler +from koi_net.network.resolver import NetworkResolver +from koi_net.network.response_handler import ResponseHandler from koi_net.poll_event_buffer import PollEventBuffer +from koi_net.poller import NodePoller +from koi_net.processor.default_handlers import ( + basic_manifest_handler, + basic_network_output_filter, + basic_rid_handler, + coordinator_contact, + edge_negotiation_handler, + forget_edge_on_node_deletion, + secure_profile_handler +) from koi_net.processor.event_worker import EventProcessingWorker -from koi_net.kobj_worker import 
KnowledgeProcessingWorker - -from .network.resolver import NetworkResolver -from .network.event_queue import EventQueue -from .network.graph import NetworkGraph -from .network.request_handler import RequestHandler -from .network.response_handler import ResponseHandler -from .network.error_handler import ErrorHandler -from .processor.kobj_queue import KobjQueue -from .processor import default_handlers -from .processor.handler import KnowledgeHandler -from .processor.knowledge_pipeline import KnowledgePipeline -from .identity import NodeIdentity -from .secure import Secure -from .config import NodeConfig -from .context import HandlerContext, ActionContext -from .effector import Effector -from .server import NodeServer -from .lifecycle import NodeLifecycle -from .poller import NodePoller -from . import default_actions -from . import behaviors -from .behaviors import handshake_with, identify_coordinators, catch_up_with +from koi_net.processor.knowledge_pipeline import KnowledgePipeline +from koi_net.processor.kobj_queue import KobjQueue +from koi_net.secure import Secure +from koi_net.server import NodeServer -logger = logging.getLogger(__name__) - - -# T = TypeVar("T", bound=NodeConfig) - -class NodeContainer(DeclarativeContainer): - """Interface for a node's subsystems. - - This class embodies a node, and wires up all of its subsystems to - work together. Currently, node implementations create an instance of - this class and override behavior where needed. Most commonly this - will be creating a new `Config` class, and adding additional knowledge - handlers to the `pipeline`, but all subsystems may be overriden by - passing new class implementations into `__init__`. - """ +@dataclass +class NodeContainer: + poll_event_buf: PollEventBuffer + kobj_queue: KobjQueue + event_queue: EventQueue + config: NodeConfig + cache: Cache + identity: NodeIdentity + graph: NetworkGraph + secure: Secure + request_handler: RequestHandler + response_handler: ResponseHandler + resolver: NetworkResolver + handler_context: HandlerContext + behaviors: Behaviors + pipeline: KnowledgePipeline + kobj_worker: KnowledgeProcessingWorker + event_worker: EventProcessingWorker + error_handler: ErrorHandler + lifecycle: NodeLifecycle + server: NodeServer + poller: NodePoller - wiring_config = WiringConfiguration( - modules=["koi_net.behaviors"] - ) - - poll_event_buf = Singleton(PollEventBuffer) - - kobj_queue = Singleton(KobjQueue) - event_queue = Singleton(EventQueue) - config = Singleton( - NodeConfig.load_from_yaml - ) - - cache = Singleton( - CacheProvider, - config=config - ) - - identity = Singleton( - NodeIdentity, - config=config - ) - - # effector = Singleton( - # Effector, - # cache=cache, - # resolver=Self, - # processor=Self, - # action_context=Self - # ) - - graph = Singleton( - NetworkGraph, - cache=cache, - identity=identity - ) - - secure = Singleton( - Secure, - identity=identity, - cache=cache, - config=config - ) - - request_handler = Singleton( - RequestHandler, - cache=cache, - identity=identity, - secure=secure - ) - - response_handler = Singleton( - ResponseHandler, - cache=cache - ) - - resolver = Singleton( - NetworkResolver, - config=config, - cache=cache, - identity=identity, - graph=graph, - request_handler=request_handler - ) - - knowledge_handlers = List( - Object(default_handlers.basic_rid_handler), - Object(default_handlers.basic_manifest_handler), - Object(default_handlers.secure_profile_handler), - Object(default_handlers.edge_negotiation_handler), - 
Object(default_handlers.coordinator_contact), - Object(default_handlers.basic_network_output_filter), - Object(default_handlers.forget_edge_on_node_deletion) - ) - - # action_context = Singleton( - # ActionContext, - # identity=identity, - # cache=cache - # ) - - handler_context = Singleton( - HandlerContext, - identity=identity, - config=config, - cache=cache, - event_queue=event_queue, - graph=graph, - request_handler=request_handler, - resolver=resolver - ) - - # actor = Singleton( - # Actor, - # ctx=handler_context - # ) - - pipeline = Singleton( - KnowledgePipeline, - handler_context=handler_context, - cache=cache, - request_handler=request_handler, - event_queue=event_queue, - graph=graph, - default_handlers=knowledge_handlers - ) - - kobj_worker = Singleton( - KnowledgeProcessingWorker, - kobj_queue=kobj_queue, - pipeline=pipeline - ) - - event_worker = Singleton( - EventProcessingWorker, - config=config, - cache=cache, - event_queue=event_queue, - request_handler=request_handler, - poll_event_buf=poll_event_buf - ) - - handshake_with = Callable( - handshake_with, - cache=cache, - identity=identity, - event_queue=event_queue - ) - - identify_coordinators = Callable( - identify_coordinators, - resolver=resolver - ) - - catch_up_with = Callable( - catch_up_with, - request_handler=request_handler, - identity=identity, - kobj_queue=kobj_queue - ) - - error_handler = Singleton( - ErrorHandler, - kobj_queue=kobj_queue, - handshake_with=handshake_with - ) - - lifecycle = Singleton( - NodeLifecycle, - config=config, - identity=identity, - graph=graph, - kobj_queue=kobj_queue, - kobj_worker=kobj_worker, - event_queue=event_queue, - event_worker=event_worker, - cache=cache, - handshake_with=handshake_with, - catch_up_with=catch_up_with, - identify_coordinators=identify_coordinators - ) - - server = Singleton( - NodeServer, - config=config, - lifecycle=lifecycle, - secure=secure, - kobj_queue=kobj_queue, - response_handler=response_handler, - poll_event_buf=poll_event_buf - ) - - poller = Singleton( - NodePoller, - kobj_queue=kobj_queue, - lifecycle=lifecycle, - resolver=resolver, - config=config - ) \ No newline at end of file +class NodeAssembler: + poll_event_buf = PollEventBuffer + kobj_queue = KobjQueue + event_queue = EventQueue + config = NodeConfig + cache = Cache + identity = NodeIdentity + graph = NetworkGraph + secure = Secure + request_handler = RequestHandler + response_handler = ResponseHandler + resolver = NetworkResolver + knowledge_handlers = [ + basic_rid_handler, + basic_manifest_handler, + secure_profile_handler, + edge_negotiation_handler, + coordinator_contact, + basic_network_output_filter, + forget_edge_on_node_deletion + ] + handler_context = HandlerContext + behaviors = Behaviors + pipeline = KnowledgePipeline + kobj_worker = KnowledgeProcessingWorker + event_worker = EventProcessingWorker + error_handler = ErrorHandler + lifecycle = NodeLifecycle + server = NodeServer + poller = NodePoller + + @classmethod + def create(cls) -> NodeContainer: + poll_event_buffer = cls.poll_event_buf() + kobj_queue = cls.kobj_queue() + event_queue = cls.event_queue() + config = cls.config.load_from_yaml() + cache = cls.cache( + directory_path=config.koi_net.cache_directory_path + ) + identity = cls.identity( + config=config + ) + graph = cls.graph( + cache=cache, + identity=identity + ) + secure = cls.secure( + identity=identity, + cache=cache, + config=config + ) + request_handler = cls.request_handler( + cache=cache, + identity=identity, + secure=secure + ) + response_handler = 
cls.response_handler( + cache=cache + ) + resolver = cls.resolver( + config=config, + cache=cache, + identity=identity, + graph=graph, + request_handler=request_handler + ) + handler_context = cls.handler_context( + identity=identity, + config=config, + cache=cache, + event_queue=event_queue, + graph=graph, + request_handler=request_handler, + resolver=resolver + ) + behaviors = cls.behaviors( + cache=cache, + identity=identity, + event_queue=event_queue, + resolver=resolver, + request_handler=request_handler, + kobj_queue=kobj_queue + ) + pipeline = cls.pipeline( + handler_context=handler_context, + cache=cache, + request_handler=request_handler, + event_queue=event_queue, + graph=graph, + knowledge_handlers=cls.knowledge_handlers + ) + kobj_worker = cls.kobj_worker( + kobj_queue=kobj_queue, + pipeline=pipeline + ) + event_worker = cls.event_worker( + config=config, + cache=cache, + event_queue=event_queue, + request_handler=request_handler, + poll_event_buf=poll_event_buffer + ) + error_handler = cls.error_handler( + kobj_queue=kobj_queue, + behaviors=behaviors + ) + lifecycle = cls.lifecycle( + config=config, + identity=identity, + graph=graph, + kobj_queue=kobj_queue, + kobj_worker=kobj_worker, + event_queue=event_queue, + event_worker=event_worker, + cache=cache, + behaviors=behaviors + ) + server = cls.server( + config=config, + lifecycle=lifecycle, + secure=secure, + kobj_queue=kobj_queue, + response_handler=response_handler, + poll_event_buf=poll_event_buffer + ) + poller = cls.poller( + kobj_queue=kobj_queue, + lifecycle=lifecycle, + resolver=resolver, + config=config + ) + + return NodeContainer( + poll_event_buf=poll_event_buffer, + kobj_queue=kobj_queue, + event_queue=event_queue, + config=config, + cache=cache, + identity=identity, + graph=graph, + secure=secure, + request_handler=request_handler, + response_handler=response_handler, + resolver=resolver, + handler_context=handler_context, + behaviors=behaviors, + pipeline=pipeline, + kobj_worker=kobj_worker, + event_worker=event_worker, + error_handler=error_handler, + lifecycle=lifecycle, + server=server, + poller=poller + ) \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index ca2c09d..afdb196 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -1,10 +1,10 @@ import logging from contextlib import contextmanager, asynccontextmanager -from typing import Callable from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode +from koi_net.behaviors import Behaviors from koi_net.kobj_worker import KnowledgeProcessingWorker from koi_net.models import END from koi_net.network.event_queue import EventQueue @@ -40,9 +40,7 @@ def __init__( event_queue: EventQueue, event_worker: EventProcessingWorker, cache: Cache, - handshake_with: Callable, - catch_up_with: Callable, - identify_coordinators: Callable + behaviors: Behaviors ): self.config = config self.identity = identity @@ -53,9 +51,7 @@ def __init__( self.event_worker = event_worker self.cache = cache - self.handshake_with = handshake_with - self.catch_up_with = catch_up_with - self.identify_coordinators = identify_coordinators + self.behaviors = behaviors @contextmanager def run(self): @@ -113,10 +109,10 @@ def start(self): if not self.graph.get_neighbors() and self.config.koi_net.first_contact.rid: logger.debug(f"I don't have any neighbors, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") - self.handshake_with(self.config.koi_net.first_contact.rid) + 
self.behaviors.handshake_with(self.config.koi_net.first_contact.rid) - for coordinator in self.identify_coordinators(): - self.catch_up_with(coordinator, rid_types=[KoiNetNode]) + for coordinator in self.behaviors.identify_coordinators(): + self.behaviors.catch_up_with(coordinator, rid_types=[KoiNetNode]) def stop(self): diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index b9f1e3d..8cacbcc 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -1,5 +1,6 @@ from logging import getLogger from typing import Callable +from koi_net.behaviors import Behaviors from koi_net.protocol.errors import ErrorType from koi_net.protocol.event import EventType from rid_lib.types import KoiNetNode @@ -16,10 +17,10 @@ class ErrorHandler: def __init__( self, kobj_queue: KobjQueue, - handshake_with: Callable + behaviors: Behaviors ): self.kobj_queue = kobj_queue - self.handshake_with = handshake_with + self.behaviors = behaviors self.timeout_counter = {} def handle_connection_error(self, node: KoiNetNode): @@ -45,7 +46,7 @@ def handle_protocol_error( match error_type: case ErrorType.UnknownNode: logger.info("Peer doesn't know me, attempting handshake...") - self.handshake_with(node) + self.behaviors.handshake_with(node) case ErrorType.InvalidKey: ... case ErrorType.InvalidSignature: ... diff --git a/src/koi_net/processor/knowledge_pipeline.py b/src/koi_net/processor/knowledge_pipeline.py index 2de7e27..cf806cd 100644 --- a/src/koi_net/processor/knowledge_pipeline.py +++ b/src/koi_net/processor/knowledge_pipeline.py @@ -30,7 +30,7 @@ class KnowledgePipeline: request_handler: RequestHandler event_queue: EventQueue graph: NetworkGraph - handlers: list[KnowledgeHandler] + knowledge_handlers: list[KnowledgeHandler] def __init__( self, @@ -39,31 +39,15 @@ def __init__( request_handler: RequestHandler, event_queue: EventQueue, graph: NetworkGraph, - default_handlers: list[KnowledgeHandler] = [] + knowledge_handlers: list[KnowledgeHandler] = [] ): self.handler_context = handler_context self.cache = cache self.request_handler = request_handler self.event_queue = event_queue self.graph = graph - self.handlers = default_handlers + self.knowledge_handlers = knowledge_handlers - def add_handler(self, handler: KnowledgeHandler): - self.handlers.append(handler) - - def register_handler( - self, - handler_type: HandlerType, - rid_types: list[RIDType] | None = None, - event_types: list[EventType | None] | None = None - ): - """Assigns decorated function as handler for this processor.""" - def decorator(func: Callable) -> Callable: - handler = KnowledgeHandler(func, handler_type, rid_types, event_types) - self.add_handler(handler) - return func - return decorator - def call_handler_chain( self, handler_type: HandlerType, @@ -79,7 +63,7 @@ def call_handler_chain( Handlers will only be called in the chain if their handler and RID type match that of the inputted knowledge object. 
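With register_handler and add_handler removed, downstream nodes now supply custom handlers up front through the assembler's knowledge_handlers list. A hedged sketch of what that looks like (handler construction mirrors the removed register_handler; MyNodeAssembler and log_final are made-up names):

from koi_net.core import NodeAssembler
from koi_net.context import HandlerContext
from koi_net.processor.handler import HandlerType, KnowledgeHandler
from koi_net.processor.knowledge_object import KnowledgeObject

def log_final(ctx: HandlerContext, kobj: KnowledgeObject):
    # Runs at the end of the chain for every knowledge object.
    print(f"finished processing {kobj.rid!r}")
    return kobj

log_handler = KnowledgeHandler(log_final, HandlerType.Final, None, None)

class MyNodeAssembler(NodeAssembler):
    knowledge_handlers = NodeAssembler.knowledge_handlers + [log_handler]

node = MyNodeAssembler.create()
node.server.run()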
""" - for handler in self.handlers: + for handler in self.knowledge_handlers: if handler_type != handler.handler_type: continue diff --git a/uv.lock b/uv.lock index 9f74552..cc4bff2 100644 --- a/uv.lock +++ b/uv.lock @@ -309,32 +309,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/87/7ce86f3fa14bc11a5a48c30d8103c26e09b6465f8d8e9d74cf7a0714f043/cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63", size = 3332908, upload-time = "2025-09-01T11:14:58.78Z" }, ] -[[package]] -name = "dependency-injector" -version = "4.48.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/26/7c/5062c4a7ffd32bf210ff55fab9d279a5beeae350fb09533d3536811e13b6/dependency_injector-4.48.1.tar.gz", hash = "sha256:1805185e4522effad6d5e348c255d27e80d3f8adc89701daf13d743367392978", size = 1100885, upload-time = "2025-06-20T10:21:52.248Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/f9/c9b77652f724aece8856e281f7a71e5af544049b3c068df70c68868e43be/dependency_injector-4.48.1-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:a6f73011d532f3ea59689aad85c7999be6da3f30393041a745d5861cdcdc02e4", size = 1631637, upload-time = "2025-06-20T10:21:24.729Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f0/d91c9cdabb1f2354762aca588757d1aa341f3cbccbc8636dd2c06acac10b/dependency_injector-4.48.1-cp38-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ac09f508fa9aee06a036ebf3e3d3b2a210276aba1993e9993cec7f1fdc5fd89e", size = 1855944, upload-time = "2025-06-20T10:21:26.753Z" }, - { url = "https://files.pythonhosted.org/packages/57/ee/d69c4758a12653edbe6ee15c0bf4195981c9820650a1cfa762cbb838485b/dependency_injector-4.48.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b05a4a980096b53ad90a87965c5450183bfbb8bbe36615d7cea97537086d622", size = 1811989, upload-time = "2025-06-20T10:21:28.293Z" }, - { url = "https://files.pythonhosted.org/packages/cf/6d/d2a257402c8c3f7a9c61f1b8a0482ec4373f1ef7fdfe784a91e883506e3b/dependency_injector-4.48.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0506e98440ee6c48fe660016d602961b1b3ecc0a8227838a2221048ed11e2fca", size = 1826408, upload-time = "2025-06-20T10:21:29.789Z" }, - { url = "https://files.pythonhosted.org/packages/65/f9/2a408d460eedb264f7ea919754c526c8f3a18c026496cacb7dd6960766d2/dependency_injector-4.48.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1994622eae8917138626303b176cba4c74e625ba1e588cb09d673ca175d299a2", size = 1863948, upload-time = "2025-06-20T10:21:31.951Z" }, - { url = "https://files.pythonhosted.org/packages/6e/8a/2edaef77e725dd8b1a625c84cbccb0f445afe58277c7b243cbf58784826a/dependency_injector-4.48.1-cp38-abi3-win32.whl", hash = "sha256:58d4d81f92e3267c331f160cbbb517fd7644b95ee57a0d6e4b01f53a7e437a4a", size = 1516768, upload-time = "2025-06-20T10:21:33.747Z" }, - { url = "https://files.pythonhosted.org/packages/8c/41/4bf523af7e1b7f367499f8b8709e0e807e9a14c7d1674b0442d7f84403c8/dependency_injector-4.48.1-cp38-abi3-win_amd64.whl", hash = "sha256:572b22b7db9b103718ea52634b5ca1ef763278338310254334f4633a57c9f0e7", size = 1639850, upload-time = "2025-06-20T10:21:35.639Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/b9/203a1cb19cc4ed42748dceb53d9cafe42ee34928f2d5c18cbe5f30d6a573/dependency_injector-4.48.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9a7862987b3dcab5ac4fd82f6bbda55d3b15af1ca7492757c428deccc3720140", size = 1732856, upload-time = "2025-06-20T10:21:37.459Z" }, - { url = "https://files.pythonhosted.org/packages/ea/6d/7bea5ea904465b4d04c7e3cddf669079d9abb0902d75b05417b5f884c570/dependency_injector-4.48.1-pp310-pypy310_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb33c6d6b6100564dfaee20f3f76a2756158cceff6499e9d0bca8290f8e5f124", size = 1822529, upload-time = "2025-06-20T10:21:39.363Z" }, - { url = "https://files.pythonhosted.org/packages/97/3d/7b16ec2cd0f4e7bba380084a713395d1483baa67e7ac63338f7b8a9a30a8/dependency_injector-4.48.1-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a659380386bd236579b7f82f51e97f6074d0c878a10db5b50086000de6ce3c28", size = 1731129, upload-time = "2025-06-20T10:21:41.241Z" }, - { url = "https://files.pythonhosted.org/packages/16/26/bf4612e9adf60fdbfd97360663d2b39ab17bd4308c7294dcfcd54546c701/dependency_injector-4.48.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:76774369c7268d5dd211af75abfcb4433d972760be90db342c2de325ee4c24a0", size = 1616736, upload-time = "2025-06-20T10:21:43.134Z" }, - { url = "https://files.pythonhosted.org/packages/04/de/92b98b96742fbc9c04273729cb14c744a97a8dc2ee3e0d12a0d3cc3945e2/dependency_injector-4.48.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51f8d9d78a1a147908ed7929df628d859251a814e6a001973bd96ae2b5648760", size = 1734103, upload-time = "2025-06-20T10:21:44.748Z" }, - { url = "https://files.pythonhosted.org/packages/70/b7/31061c32c7d3e1f6c3e1fc71eb37d2ba4134e9bb2e50ad558bbff4aad9fa/dependency_injector-4.48.1-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c23ab17cd3e160de1fc5d78719bf86fbfc81c21c8ea02b43832a6a1e2c8a8d8", size = 1826656, upload-time = "2025-06-20T10:21:46.663Z" }, - { url = "https://files.pythonhosted.org/packages/57/a7/00b2a6e8769f3a5b248edf0b0d503289eb3516fa192f4e4cb368163f4a71/dependency_injector-4.48.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79be912a0dedb1341b1400018defca6a9966fcb8d4a84b325623fa57d3c08171", size = 1734610, upload-time = "2025-06-20T10:21:48.271Z" }, - { url = "https://files.pythonhosted.org/packages/43/b3/aa73fe301cf4dc006d2d6d82b6bf2f9a5776854f20e0aaa1122fa4fd1f2f/dependency_injector-4.48.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:65d9cf9f4eb31f837ed387210158e0003a4509478de1cbdc56c8439232f22ecd", size = 1618844, upload-time = "2025-06-20T10:21:50.271Z" }, -] - [[package]] name = "docutils" version = "0.21.2" @@ -530,7 +504,6 @@ version = "1.1.0" source = { editable = "." 
} dependencies = [ { name = "cryptography" }, - { name = "dependency-injector" }, { name = "fastapi" }, { name = "httpx" }, { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, @@ -561,7 +534,6 @@ docs = [ requires-dist = [ { name = "build", marker = "extra == 'dev'" }, { name = "cryptography", specifier = ">=45.0.3" }, - { name = "dependency-injector", specifier = ">=4.48.1" }, { name = "fastapi", specifier = ">=0.115.12" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "networkx", specifier = ">=3.4.2" }, From 8c7530cced390b9b21781ae7a63a190a94427fae Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 23 Sep 2025 16:28:13 -0400 Subject: [PATCH 06/53] updated effector, removed deprecated flush queue function calls. (prev commit also removed add_handler and register_handler functions from pipeline, handlers should now be passed in as a list in the NodeAssembler class) --- src/koi_net/behaviors.py | 1 - src/koi_net/context.py | 12 ++----- src/koi_net/core.py | 15 ++++++++- src/koi_net/effector.py | 35 +++++++-------------- src/koi_net/processor/knowledge_pipeline.py | 3 -- 5 files changed, 28 insertions(+), 38 deletions(-) diff --git a/src/koi_net/behaviors.py b/src/koi_net/behaviors.py index a74c717..0419813 100644 --- a/src/koi_net/behaviors.py +++ b/src/koi_net/behaviors.py @@ -42,7 +42,6 @@ def handshake_with(self, target: KoiNetNode): bundle=self.cache.read(self.identity.rid)), target=target ) - # self.ctx.event_queue.flush_webhook_queue(target) def identify_coordinators(self) -> list[KoiNetNode]: """Returns node's providing state for `orn:koi-net.node`.""" diff --git a/src/koi_net/context.py b/src/koi_net/context.py index 237d551..af3a0e0 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -39,6 +39,7 @@ def __init__( config: NodeConfig, cache: Cache, event_queue: EventQueue, + kobj_queue: KobjQueue, graph: NetworkGraph, request_handler: RequestHandler, resolver: NetworkResolver, @@ -47,14 +48,7 @@ def __init__( self.config = config self.cache = cache self.event_queue = event_queue + self.kobj_queue = kobj_queue self.graph = graph self.request_handler = request_handler - self.resolver = resolver - self._processor = None - - def set_processor(self, processor: KobjQueue): - self._processor = processor - - @property - def handle(self): - return self._processor.put_kobj \ No newline at end of file + self.resolver = resolver \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index bca0261..735dbd8 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -2,7 +2,8 @@ from rid_lib.ext import Cache from koi_net.behaviors import Behaviors from koi_net.config import NodeConfig -from koi_net.context import HandlerContext +from koi_net.context import ActionContext, HandlerContext +from koi_net.effector import Effector from koi_net.identity import NodeIdentity from koi_net.kobj_worker import KnowledgeProcessingWorker from koi_net.lifecycle import NodeLifecycle @@ -75,6 +76,8 @@ class NodeAssembler: forget_edge_on_node_deletion ] handler_context = HandlerContext + action_context = ActionContext + effector = Effector behaviors = Behaviors pipeline = KnowledgePipeline kobj_worker = KnowledgeProcessingWorker @@ -125,10 +128,20 @@ def create(cls) -> NodeContainer: config=config, cache=cache, event_queue=event_queue, + kobj_queue=kobj_queue, graph=graph, request_handler=request_handler, resolver=resolver ) + action_context = cls.action_context( + identity=identity + 
) + effector = cls.effector( + cache=cache, + resolver=resolver, + kobj_queue=kobj_queue, + action_context=action_context + ) behaviors = cls.behaviors( cache=cache, identity=identity, diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index 9eb1f44..6e6dd87 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -5,12 +5,8 @@ from rid_lib.core import RID, RIDType from rid_lib.types import KoiNetNode from .network.resolver import NetworkResolver - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from .processor.kobj_queue import KobjQueue - from .context import ActionContext +from .processor.kobj_queue import KobjQueue +from .context import ActionContext logger = logging.getLogger(__name__) @@ -24,12 +20,12 @@ class Effector: cache: Cache resolver: NetworkResolver - processor: "KobjQueue | None" - action_context: "ActionContext | None" + kobj_queue: KobjQueue | None + action_context: ActionContext | None _action_table: dict[ type[RID], Callable[ - ["ActionContext", RID], + [ActionContext, RID], Bundle | None ] ] = dict() @@ -37,25 +33,16 @@ class Effector: def __init__( self, cache: Cache, - resolver: "NetworkResolver", - processor: "KobjQueue", - action_context: "ActionContext" + resolver: NetworkResolver, + kobj_queue: KobjQueue, + action_context: ActionContext ): self.cache = cache self.resolver = resolver - self.processor = processor + self.kobj_queue = kobj_queue self.action_context = action_context self._action_table = self.__class__._action_table.copy() - - # def set_processor(self, processor: "ProcessorInterface"): - # self.processor = processor - - # def set_resolver(self, resolver: "NetworkResolver"): - # self.resolver = resolver - - # def set_action_context(self, action_context: "ActionContext"): - # self.action_context = action_context - + @classmethod def register_default_action(cls, rid_type: RIDType): def decorator(func: Callable) -> Callable: @@ -156,7 +143,7 @@ def deref( and bundle is not None and source != BundleSource.CACHE ): - self.processor.put_kobj( + self.kobj_queue.put_kobj( bundle=bundle, source=source if type(source) is KoiNetNode else None ) diff --git a/src/koi_net/processor/knowledge_pipeline.py b/src/koi_net/processor/knowledge_pipeline.py index cf806cd..e107f31 100644 --- a/src/koi_net/processor/knowledge_pipeline.py +++ b/src/koi_net/processor/knowledge_pipeline.py @@ -1,6 +1,4 @@ import logging -from typing import Callable -from rid_lib.core import RIDType from rid_lib.types import KoiNetEdge, KoiNetNode from rid_lib.ext import Cache from ..protocol.event import EventType @@ -199,6 +197,5 @@ def process(self, kobj: KnowledgeObject): for node in kobj.network_targets: self.event_queue.push_event_to(kobj.normalized_event, node) - self.event_queue.flush_webhook_queue(node) kobj = self.call_handler_chain(HandlerType.Final, kobj) From 353398dd4d06626e0fd33fa098a3bc2f7164d6f0 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 23 Sep 2025 16:54:06 -0400 Subject: [PATCH 07/53] prototype interested RID types handler and configuration --- src/koi_net/config.py | 4 ++ src/koi_net/processor/default_handlers.py | 70 +++++++++++++++-------- src/koi_net/protocol/node.py | 6 +- 3 files changed, 53 insertions(+), 27 deletions(-) diff --git a/src/koi_net/config.py b/src/koi_net/config.py index 8281e21..7065dc6 100644 --- a/src/koi_net/config.py +++ b/src/koi_net/config.py @@ -1,4 +1,5 @@ import os +from rid_lib import RIDType from ruamel.yaml import YAML from pydantic import BaseModel, Field, PrivateAttr from dotenv import load_dotenv @@ 
-30,6 +31,9 @@ class KoiNetConfig(BaseModel): node_rid: KoiNetNode | None = None node_profile: NodeProfile + rid_types_of_interest: list[RIDType] = Field( + default_factory=lambda: [KoiNetNode]) + cache_directory_path: str = ".rid_cache" event_queues_path: str = "event_queues.json" private_key_pem_path: str = "priv_key.pem" diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/default_handlers.py index a3d5861..e87f1ae 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/default_handlers.py @@ -143,7 +143,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): edge_profile.status = EdgeStatus.APPROVED updated_bundle = Bundle.generate(kobj.rid, edge_profile.model_dump()) - ctx.handle(bundle=updated_bundle, event_type=EventType.UPDATE) + ctx.kobj_queue.put_kobj(bundle=updated_bundle, event_type=EventType.UPDATE) return elif edge_profile.target == ctx.identity.rid: @@ -162,44 +162,66 @@ def coordinator_contact(ctx: HandlerContext, kobj: KnowledgeObject): handler will propose a new edge subscribing to future node events, and fetch existing nodes to catch up to the current state. """ - node_profile = kobj.bundle.validate_contents(NodeProfile) - - # looking for event provider of nodes - if KoiNetNode not in node_profile.provides.event: - return - - # prevents coordinators from attempting to form a self loop + # prevents nodes from attempting to form a self loop if kobj.rid == ctx.identity.rid: return - # already have an edge established - if ctx.graph.get_edge( - source=kobj.rid, - target=ctx.identity.rid, - ) is not None: + node_profile = kobj.bundle.validate_contents(NodeProfile) + + available_rid_types = list( + set(ctx.config.koi_net.rid_types_of_interest) & + set(node_profile.provides.event) + ) + + if not available_rid_types: return logger.info("Identified a coordinator!") logger.info("Proposing new edge") - if ctx.identity.profile.node_type == NodeType.FULL: - edge_type = EdgeType.WEBHOOK + # already have an edge established + edge_rid = ctx.graph.get_edge( + source=kobj.rid, + target=ctx.identity.rid, + ) + + if edge_rid: + prev_edge_bundle = ctx.cache.read(edge_rid) + edge_profile = prev_edge_bundle.validate_contents(EdgeProfile) + + if set(edge_profile.rid_types) == set(available_rid_types): + # no change in rid types + return + + edge_profile.rid_types = available_rid_types + edge_profile.status = EdgeStatus.PROPOSED + else: - edge_type = EdgeType.POLL + source = kobj.rid + target = ctx.identity.rid + if ctx.identity.profile.node_type == NodeType.FULL: + edge_type = EdgeType.WEBHOOK + else: + edge_type = EdgeType.POLL + + edge_rid = KoiNetEdge(sha256_hash(str(source) + str(target))) + edge_profile = EdgeProfile( + source=source, + target=target, + rid_types=available_rid_types, + edge_type=edge_type, + status=EdgeStatus.PROPOSED + ) # queued for processing - ctx.handle(bundle=generate_edge_bundle( - source=kobj.rid, - target=ctx.identity.rid, - edge_type=edge_type, - rid_types=[KoiNetNode] - )) + edge_bundle = Bundle.generate(edge_rid, edge_profile.model_dump()) + ctx.kobj_queue.put_kobj(bundle=edge_bundle) logger.info("Catching up on network state") payload = ctx.request_handler.fetch_rids( node=kobj.rid, - rid_types=[KoiNetNode] + rid_types=available_rid_types ) for rid in payload.rids: if rid == ctx.identity.rid: @@ -211,7 +233,7 @@ def coordinator_contact(ctx: HandlerContext, kobj: KnowledgeObject): # marked as external since we are handling RIDs from another node # will fetch remotely instead of 
checking local cache - ctx.handle(rid=rid, source=kobj.rid) + ctx.kobj_queue.put_kobj(rid=rid, source=kobj.rid) logger.info("Done") diff --git a/src/koi_net/protocol/node.py b/src/koi_net/protocol/node.py index c60d9d2..20a6fc0 100644 --- a/src/koi_net/protocol/node.py +++ b/src/koi_net/protocol/node.py @@ -1,5 +1,5 @@ from enum import StrEnum -from pydantic import BaseModel +from pydantic import BaseModel, Field from rid_lib import RIDType @@ -8,8 +8,8 @@ class NodeType(StrEnum): PARTIAL = "PARTIAL" class NodeProvides(BaseModel): - event: list[RIDType] = [] - state: list[RIDType] = [] + event: list[RIDType] = Field(default_factory=list) + state: list[RIDType] = Field(default_factory=list) class NodeProfile(BaseModel): base_url: str | None = None From 48ae6ff854db1d1f5734b7fd6dff7a4bd2a1c789 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 30 Sep 2025 19:32:31 -0400 Subject: [PATCH 08/53] factored out handshake into its own component, updated examples, added JSON schemas for all protocol objects --- .gitignore | 4 +- container_test.py | 4 -- examples/coordinator.py | 43 +++++++++++-------- examples/partial.py | 8 +--- schemas/bundle.schema.json | 16 +++++++ schemas/bundles_payload.schema.json | 32 ++++++++++++++ schemas/edge_profile.schema.json | 49 ++++++++++++++++++++++ schemas/error_response.schema.json | 23 ++++++++++ schemas/event.schema.json | 33 +++++++++++++++ schemas/events_payload.schema.json | 20 +++++++++ schemas/fetch_bundles.schema.json | 20 +++++++++ schemas/fetch_manifests.schema.json | 25 +++++++++++ schemas/fetch_rids.schema.json | 19 +++++++++ schemas/manifest.schema.json | 21 ++++++++++ schemas/manifests_payload.schema.json | 26 ++++++++++++ schemas/node_profile.schema.json | 51 +++++++++++++++++++++++ schemas/poll_events.schema.json | 16 +++++++ schemas/rids_payload.schema.json | 20 +++++++++ schemas/signed_envelope.schema.json | 33 +++++++++++++++ schemas/unsigned_envelope.json | 29 +++++++++++++ src/koi_net/behaviors.py | 19 --------- src/koi_net/cache_adapter.py | 10 ----- src/koi_net/core.py | 25 +++++++---- src/koi_net/handshaker.py | 39 +++++++++++++++++ src/koi_net/lifecycle.py | 19 ++++----- src/koi_net/network/error_handler.py | 9 ++-- src/koi_net/processor/default_handlers.py | 2 +- 27 files changed, 532 insertions(+), 83 deletions(-) delete mode 100644 container_test.py create mode 100644 schemas/bundle.schema.json create mode 100644 schemas/bundles_payload.schema.json create mode 100644 schemas/edge_profile.schema.json create mode 100644 schemas/error_response.schema.json create mode 100644 schemas/event.schema.json create mode 100644 schemas/events_payload.schema.json create mode 100644 schemas/fetch_bundles.schema.json create mode 100644 schemas/fetch_manifests.schema.json create mode 100644 schemas/fetch_rids.schema.json create mode 100644 schemas/manifest.schema.json create mode 100644 schemas/manifests_payload.schema.json create mode 100644 schemas/node_profile.schema.json create mode 100644 schemas/poll_events.schema.json create mode 100644 schemas/rids_payload.schema.json create mode 100644 schemas/signed_envelope.schema.json create mode 100644 schemas/unsigned_envelope.json delete mode 100644 src/koi_net/cache_adapter.py create mode 100644 src/koi_net/handshaker.py diff --git a/.gitignore b/.gitignore index 3706b3c..b5af389 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ rid-lib __pycache__ -*.json *.pem *.yaml venv @@ -9,4 +8,5 @@ prototypes .vscode dist/ docs/ -tests/ \ No newline at end of file +tests/ +.rid_cache/ \ No newline at end 
of file diff --git a/container_test.py b/container_test.py deleted file mode 100644 index 21c9f79..0000000 --- a/container_test.py +++ /dev/null @@ -1,4 +0,0 @@ -from koi_net.core import NodeContainer - -node = NodeContainer() -# node. \ No newline at end of file diff --git a/examples/coordinator.py b/examples/coordinator.py index 3e941be..369ab8e 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -2,11 +2,11 @@ from rich.logging import RichHandler from pydantic import Field from rid_lib.types import KoiNetNode, KoiNetEdge -from koi_net.config import NodeConfig, KoiNetConfig +from koi_net.config import NodeConfig, KoiNetConfig, ServerConfig +from koi_net.core import NodeAssembler from koi_net.protocol.node import NodeProfile, NodeProvides, NodeType -from koi_net import NodeContainer from koi_net.context import HandlerContext -from koi_net.processor.handler import HandlerType +from koi_net.processor.handler import HandlerType, KnowledgeHandler from koi_net.processor.knowledge_object import KnowledgeObject from koi_net.protocol.event import Event, EventType from koi_net.protocol.edge import EdgeType, generate_edge_bundle @@ -21,7 +21,11 @@ logging.getLogger("koi_net").setLevel(logging.DEBUG) logger = logging.getLogger(__name__) + class CoordinatorConfig(NodeConfig): + server: ServerConfig = Field(default_factory=lambda: + ServerConfig(port=8080) + ) koi_net: KoiNetConfig = Field(default_factory = lambda: KoiNetConfig( node_name="coordinator", @@ -32,18 +36,13 @@ class CoordinatorConfig(NodeConfig): state=[KoiNetNode, KoiNetEdge] ) ), - cache_directory_path=".coordinator_rid_cache", - event_queues_path="coordinator_event_queues.json", - private_key_pem_path="coordinator_priv_key.pem" + rid_types_of_interest=[KoiNetNode, KoiNetEdge] ) ) - -node = NodeContainer( - config=CoordinatorConfig.load_from_yaml("coordinator_config.yaml"), - use_kobj_processor_thread=True -) -@node.kobj_queue_manager.pipeline.register_handler(HandlerType.Network, rid_types=[KoiNetNode]) +@KnowledgeHandler.create( + HandlerType.Network, + rid_types=[KoiNetNode]) def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): logger.info("Handling node handshake") @@ -52,11 +51,10 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): return logger.info("Sharing this node's bundle with peer") - identity_bundle = ctx.effector.deref(ctx.identity.rid) + identity_bundle = ctx.cache.read(ctx.identity.rid) ctx.event_queue.push_event_to( event=Event.from_bundle(EventType.NEW, identity_bundle), - target=kobj.rid, - flush=True + target=kobj.rid ) logger.info("Proposing new edge") @@ -69,8 +67,17 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): rid_types=[KoiNetNode, KoiNetEdge] ) - ctx.handle(rid=edge_bundle.rid, event_type=EventType.FORGET) - ctx.handle(bundle=edge_bundle) - + ctx.kobj_queue.put_kobj(rid=edge_bundle.rid, event_type=EventType.FORGET) + ctx.kobj_queue.put_kobj(bundle=edge_bundle) + +class CoordinatorNodeAssembler(NodeAssembler): + config = CoordinatorConfig + knowledge_handlers = [ + *NodeAssembler.knowledge_handlers, + handshake_handler + ] + + if __name__ == "__main__": + node = CoordinatorNodeAssembler.create() node.server.run() \ No newline at end of file diff --git a/examples/partial.py b/examples/partial.py index a9248e2..d8158b9 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -1,7 +1,6 @@ import logging from pydantic import Field from rich.logging import RichHandler -from koi_net import NodeContainer from koi_net.core import 
NodeAssembler from koi_net.protocol.node import NodeProfile, NodeType from koi_net.config import NodeConfig, KoiNetConfig @@ -23,10 +22,7 @@ class PartialNodeConfig(NodeConfig): node_name="partial", node_profile=NodeProfile( node_type=NodeType.PARTIAL - ), - cache_directory_path=".partial_rid_cache", - event_queues_path="partial_event_queues.json", - private_key_pem_path="partial_priv_key.pem" + ) ) ) @@ -36,4 +32,4 @@ class PartialNodeAssembler(NodeAssembler): if __name__ == "__main__": node = PartialNodeAssembler.create() - node.server.run() \ No newline at end of file + node.poller.run() \ No newline at end of file diff --git a/schemas/bundle.schema.json b/schemas/bundle.schema.json new file mode 100644 index 0000000..48ccfed --- /dev/null +++ b/schemas/bundle.schema.json @@ -0,0 +1,16 @@ +{ + "title": "Bundle", + "type": "object", + "properties": { + "manifest": { + "$ref": "./manifest.schema.json" + }, + "contents": { + "type": "object" + } + }, + "required": [ + "manifest", + "contents" + ] +} \ No newline at end of file diff --git a/schemas/bundles_payload.schema.json b/schemas/bundles_payload.schema.json new file mode 100644 index 0000000..69577a6 --- /dev/null +++ b/schemas/bundles_payload.schema.json @@ -0,0 +1,32 @@ +{ + "title": "Bundles Payload", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "bundles_payload" + }, + "bundles": { + "type": "array", + "items": { + "$ref": "./bundle.schema.json" + } + }, + "not_found": { + "type": "array", + "items": { + "type": "string" + } + }, + "deferred": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "type", + "bundles" + ] +} \ No newline at end of file diff --git a/schemas/edge_profile.schema.json b/schemas/edge_profile.schema.json new file mode 100644 index 0000000..9f0d078 --- /dev/null +++ b/schemas/edge_profile.schema.json @@ -0,0 +1,49 @@ +{ + "$defs": { + "EdgeStatus": { + "title": "Edge Status", + "type": "string", + "enum": [ + "PROPOSED", + "APPROVED" + ] + }, + "EdgeType": { + "title": "Edge Type", + "type": "string", + "enum": [ + "WEBHOOK", + "POLL" + ] + } + }, + "title": "Edge Profile", + "type": "object", + "properties": { + "source": { + "type": "string" + }, + "target": { + "type": "string" + }, + "edge_type": { + "$ref": "#/$defs/EdgeType" + }, + "status": { + "$ref": "#/$defs/EdgeStatus" + }, + "rid_types": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "source", + "target", + "edge_type", + "status", + "rid_types" + ] +} \ No newline at end of file diff --git a/schemas/error_response.schema.json b/schemas/error_response.schema.json new file mode 100644 index 0000000..467d394 --- /dev/null +++ b/schemas/error_response.schema.json @@ -0,0 +1,23 @@ +{ + "title": "Error Response", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "error_response" + }, + "error": { + "type": "string", + "enum": [ + "unknown_node", + "invalid_key", + "invalid_signature", + "invalid_target" + ] + } + }, + "required": [ + "type", + "error" + ] +} \ No newline at end of file diff --git a/schemas/event.schema.json b/schemas/event.schema.json new file mode 100644 index 0000000..b378bd2 --- /dev/null +++ b/schemas/event.schema.json @@ -0,0 +1,33 @@ +{ + "$defs": { + "EventType": { + "title": "EventType", + "type": "string", + "enum": [ + "NEW", + "UPDATE", + "FORGET" + ] + } + }, + "title": "Event", + "type": "object", + "properties": { + "rid": { + "type": "string" + }, + "event_type": { + "$ref": 
"#/$defs/EventType" + }, + "manifest": { + "$ref": "./manifest.schema.json" + }, + "contents": { + "type": "object" + } + }, + "required": [ + "rid", + "event_type" + ] +} \ No newline at end of file diff --git a/schemas/events_payload.schema.json b/schemas/events_payload.schema.json new file mode 100644 index 0000000..20d395a --- /dev/null +++ b/schemas/events_payload.schema.json @@ -0,0 +1,20 @@ +{ + "title": "Events Payload", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "events_payload" + }, + "events": { + "type": "array", + "items": { + "$ref": "./event.schema.json" + } + } + }, + "required": [ + "type", + "events" + ] +} \ No newline at end of file diff --git a/schemas/fetch_bundles.schema.json b/schemas/fetch_bundles.schema.json new file mode 100644 index 0000000..17c92ca --- /dev/null +++ b/schemas/fetch_bundles.schema.json @@ -0,0 +1,20 @@ +{ + "title": "Fetch Bundles", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "fetch_bundles" + }, + "rids": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "type", + "rids" + ] +} \ No newline at end of file diff --git a/schemas/fetch_manifests.schema.json b/schemas/fetch_manifests.schema.json new file mode 100644 index 0000000..9f4c468 --- /dev/null +++ b/schemas/fetch_manifests.schema.json @@ -0,0 +1,25 @@ +{ + "title": "Fetch Manifests", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "fetch_manifests" + }, + "rid_types": { + "type": "array", + "items": { + "type": "string" + } + }, + "rids": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "type" + ] +} \ No newline at end of file diff --git a/schemas/fetch_rids.schema.json b/schemas/fetch_rids.schema.json new file mode 100644 index 0000000..cf846c8 --- /dev/null +++ b/schemas/fetch_rids.schema.json @@ -0,0 +1,19 @@ +{ + "title": "Fetch RIDs", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "fetch_rids" + }, + "rid_types": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "type" + ] +} \ No newline at end of file diff --git a/schemas/manifest.schema.json b/schemas/manifest.schema.json new file mode 100644 index 0000000..639cf65 --- /dev/null +++ b/schemas/manifest.schema.json @@ -0,0 +1,21 @@ +{ + "title": "Manifest", + "type": "object", + "properties": { + "rid": { + "type": "string" + }, + "timestamp": { + "type": "string", + "format": "date-time" + }, + "sha256_hash": { + "type": "string" + } + }, + "required": [ + "rid", + "timestamp", + "sha256_hash" + ] +} \ No newline at end of file diff --git a/schemas/manifests_payload.schema.json b/schemas/manifests_payload.schema.json new file mode 100644 index 0000000..4dad3e8 --- /dev/null +++ b/schemas/manifests_payload.schema.json @@ -0,0 +1,26 @@ +{ + "title": "Manifests Payload", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "manifests_payload" + }, + "manifests": { + "type": "array", + "items": { + "$ref": "./manifest.schema.json" + } + }, + "not_found": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "type", + "manifests" + ] +} \ No newline at end of file diff --git a/schemas/node_profile.schema.json b/schemas/node_profile.schema.json new file mode 100644 index 0000000..832fc20 --- /dev/null +++ b/schemas/node_profile.schema.json @@ -0,0 +1,51 @@ +{ + "$defs": { + "NodeType": { + "title": "Node Type", + "type": "string", + "enum": 
[ + "FULL", + "PARTIAL" + ] + } + }, + "title": "Node Profile", + "type": "object", + "properties": { + "node_type": { + "$ref": "#/$defs/NodeType" + }, + "base_url": { + "type": "string" + }, + "provides": { + "type": "object", + "properties": { + "event": { + "type": "array", + "items": { + "type": "string" + } + }, + "state": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "event", + "state" + ] + }, + "public_key": { + "type": "string" + } + }, + "required": [ + "node_type", + "provides", + "public_key" + ] +} \ No newline at end of file diff --git a/schemas/poll_events.schema.json b/schemas/poll_events.schema.json new file mode 100644 index 0000000..6e5c458 --- /dev/null +++ b/schemas/poll_events.schema.json @@ -0,0 +1,16 @@ +{ + "title": "Poll Events", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "poll_events" + }, + "limit": { + "type": "integer" + } + }, + "required": [ + "type" + ] +} \ No newline at end of file diff --git a/schemas/rids_payload.schema.json b/schemas/rids_payload.schema.json new file mode 100644 index 0000000..296a46c --- /dev/null +++ b/schemas/rids_payload.schema.json @@ -0,0 +1,20 @@ +{ + "title": "RIDs Payload", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "rids_payload" + }, + "rids": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "type", + "rids" + ] +} \ No newline at end of file diff --git a/schemas/signed_envelope.schema.json b/schemas/signed_envelope.schema.json new file mode 100644 index 0000000..3d48ce6 --- /dev/null +++ b/schemas/signed_envelope.schema.json @@ -0,0 +1,33 @@ +{ + "title": "Signed Envelope", + "type": "object", + "properties": { + "payload": { + "anyOf": [ + {"$ref": "./poll_events.schema.json"}, + {"$ref": "./fetch_rids.schema.json"}, + {"$ref": "./fetch_manifests.schema.json"}, + {"$ref": "./fetch_bundles.schema.json"}, + {"$ref": "./rids_payload.schema.json"}, + {"$ref": "./manifests_payload.schema.json"}, + {"$ref": "./bundles_payload.schema.json"}, + {"$ref": "./events_payload.schema.json"} + ] + }, + "source_node": { + "type": "string" + }, + "target_node": { + "type": "string" + }, + "signature": { + "type": "string" + } + }, + "required": [ + "payload", + "source_node", + "target_node", + "signature" + ] +} \ No newline at end of file diff --git a/schemas/unsigned_envelope.json b/schemas/unsigned_envelope.json new file mode 100644 index 0000000..e9b1301 --- /dev/null +++ b/schemas/unsigned_envelope.json @@ -0,0 +1,29 @@ +{ + "title": "Unsigned Envelope", + "type": "object", + "properties": { + "payload": { + "anyOf": [ + {"$ref": "./poll_events.schema.json"}, + {"$ref": "./fetch_rids.schema.json"}, + {"$ref": "./fetch_manifests.schema.json"}, + {"$ref": "./fetch_bundles.schema.json"}, + {"$ref": "./rids_payload.schema.json"}, + {"$ref": "./manifests_payload.schema.json"}, + {"$ref": "./bundles_payload.schema.json"}, + {"$ref": "./events_payload.schema.json"} + ] + }, + "source_node": { + "type": "string" + }, + "target_node": { + "type": "string" + } + }, + "required": [ + "payload", + "source_node", + "target_node" + ] +} \ No newline at end of file diff --git a/src/koi_net/behaviors.py b/src/koi_net/behaviors.py index 0419813..229316d 100644 --- a/src/koi_net/behaviors.py +++ b/src/koi_net/behaviors.py @@ -24,25 +24,6 @@ def __init__(self, cache: Cache, identity: NodeIdentity, event_queue: EventQueue self.request_handler = request_handler self.kobj_queue = kobj_queue - def 
handshake_with(self, target: KoiNetNode): - """Initiates a handshake with target node. - Pushes successive `FORGET` and `NEW` events to target node to - reset the target's cache in case it already knew this node. - """ - logger.debug(f"Initiating handshake with {target}") - self.event_queue.push_event_to( - Event.from_rid( - event_type=EventType.FORGET, - rid=self.identity.rid), - target=target - ) - self.event_queue.push_event_to( - event=Event.from_bundle( - event_type=EventType.NEW, - bundle=self.cache.read(self.identity.rid)), - target=target - ) - def identify_coordinators(self) -> list[KoiNetNode]: """Returns node's providing state for `orn:koi-net.node`.""" return self.resolver.get_state_providers(KoiNetNode) diff --git a/src/koi_net/cache_adapter.py b/src/koi_net/cache_adapter.py deleted file mode 100644 index 8abc936..0000000 --- a/src/koi_net/cache_adapter.py +++ /dev/null @@ -1,10 +0,0 @@ -from rid_lib.ext import Cache -from koi_net.config import NodeConfig - -class CacheProvider(Cache): - def __init__(self, config: NodeConfig): - self.config = config - - @property - def directory_path(self): - return self.config.koi_net.cache_directory_path \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 735dbd8..bf8a6a0 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -4,6 +4,7 @@ from koi_net.config import NodeConfig from koi_net.context import ActionContext, HandlerContext from koi_net.effector import Effector +from koi_net.handshaker import Handshaker from koi_net.identity import NodeIdentity from koi_net.kobj_worker import KnowledgeProcessingWorker from koi_net.lifecycle import NodeLifecycle @@ -19,7 +20,7 @@ basic_manifest_handler, basic_network_output_filter, basic_rid_handler, - coordinator_contact, + node_contact_handler, edge_negotiation_handler, forget_edge_on_node_deletion, secure_profile_handler @@ -30,6 +31,7 @@ from koi_net.secure import Secure from koi_net.server import NodeServer + @dataclass class NodeContainer: poll_event_buf: PollEventBuffer @@ -63,6 +65,7 @@ class NodeAssembler: identity = NodeIdentity graph = NetworkGraph secure = Secure + handshaker = Handshaker request_handler = RequestHandler response_handler = ResponseHandler resolver = NetworkResolver @@ -71,7 +74,7 @@ class NodeAssembler: basic_manifest_handler, secure_profile_handler, edge_negotiation_handler, - coordinator_contact, + node_contact_handler, basic_network_output_filter, forget_edge_on_node_deletion ] @@ -108,10 +111,20 @@ def create(cls) -> NodeContainer: cache=cache, config=config ) + handshaker = cls.handshaker( + cache=cache, + identity=identity, + event_queue=event_queue + ) + error_handler = cls.error_handler( + kobj_queue=kobj_queue, + handshaker=handshaker + ) request_handler = cls.request_handler( cache=cache, identity=identity, - secure=secure + secure=secure, + error_handler=error_handler ) response_handler = cls.response_handler( cache=cache @@ -169,10 +182,6 @@ def create(cls) -> NodeContainer: request_handler=request_handler, poll_event_buf=poll_event_buffer ) - error_handler = cls.error_handler( - kobj_queue=kobj_queue, - behaviors=behaviors - ) lifecycle = cls.lifecycle( config=config, identity=identity, @@ -182,7 +191,7 @@ def create(cls) -> NodeContainer: event_queue=event_queue, event_worker=event_worker, cache=cache, - behaviors=behaviors + handshaker=behaviors ) server = cls.server( config=config, diff --git a/src/koi_net/handshaker.py b/src/koi_net/handshaker.py new file mode 100644 index 0000000..d1c99eb --- /dev/null +++ 
b/src/koi_net/handshaker.py @@ -0,0 +1,39 @@ +from logging import getLogger +from rid_lib.ext import Cache +from rid_lib.types import KoiNetNode +from koi_net.identity import NodeIdentity +from koi_net.network.event_queue import EventQueue +from .protocol.event import Event, EventType + +logger = getLogger(__name__) + + +class Handshaker: + def __init__( + self, + cache: Cache, + identity: NodeIdentity, + event_queue: EventQueue + ): + self.cache = cache + self.identity = identity + self.event_queue = event_queue + + def handshake_with(self, target: KoiNetNode): + """Initiates a handshake with target node. + Pushes successive `FORGET` and `NEW` events to target node to + reset the target's cache in case it already knew this node. + """ + logger.debug(f"Initiating handshake with {target}") + self.event_queue.push_event_to( + Event.from_rid( + event_type=EventType.FORGET, + rid=self.identity.rid), + target=target + ) + self.event_queue.push_event_to( + event=Event.from_bundle( + event_type=EventType.NEW, + bundle=self.cache.read(self.identity.rid)), + target=target + ) \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index afdb196..e70cc31 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -4,7 +4,7 @@ from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode -from koi_net.behaviors import Behaviors +from koi_net.handshaker import Handshaker from koi_net.kobj_worker import KnowledgeProcessingWorker from koi_net.models import END from koi_net.network.event_queue import EventQueue @@ -40,7 +40,7 @@ def __init__( event_queue: EventQueue, event_worker: EventProcessingWorker, cache: Cache, - behaviors: Behaviors + handshaker: Handshaker ): self.config = config self.identity = identity @@ -50,8 +50,7 @@ def __init__( self.event_queue = event_queue self.event_worker = event_worker self.cache = cache - - self.behaviors = behaviors + self.handshaker = handshaker @contextmanager def run(self): @@ -93,7 +92,7 @@ def start(self): self.event_worker.thread.start() self.graph.generate() - # refresh to reflect changes (if any) in config.yaml + # refresh to reflect changes (if any) in config.yaml self.kobj_queue.put_kobj(bundle=Bundle.generate( rid=self.identity.rid, @@ -101,18 +100,18 @@ def start(self): )) logger.debug("Waiting for kobj queue to empty") - - # TODO: REFACTOR self.kobj_queue.q.join() # TODO: FACTOR OUT BEHAVIOR if not self.graph.get_neighbors() and self.config.koi_net.first_contact.rid: logger.debug(f"I don't have any neighbors, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") - self.behaviors.handshake_with(self.config.koi_net.first_contact.rid) + self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) + + - for coordinator in self.behaviors.identify_coordinators(): - self.behaviors.catch_up_with(coordinator, rid_types=[KoiNetNode]) + for coordinator in self.handshaker.identify_coordinators(): + self.handshaker.catch_up_with(coordinator, rid_types=[KoiNetNode]) def stop(self): diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index 8cacbcc..8ee6926 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -1,6 +1,5 @@ from logging import getLogger -from typing import Callable -from koi_net.behaviors import Behaviors +from koi_net.handshaker import Handshaker from koi_net.protocol.errors import ErrorType from koi_net.protocol.event import EventType from rid_lib.types import KoiNetNode @@ -17,10 
+16,10 @@ class ErrorHandler: def __init__( self, kobj_queue: KobjQueue, - behaviors: Behaviors + handshaker: Handshaker ): self.kobj_queue = kobj_queue - self.behaviors = behaviors + self.handshaker = handshaker self.timeout_counter = {} def handle_connection_error(self, node: KoiNetNode): @@ -46,7 +45,7 @@ def handle_protocol_error( match error_type: case ErrorType.UnknownNode: logger.info("Peer doesn't know me, attempting handshake...") - self.behaviors.handshake_with(node) + self.handshaker.handshake_with(node) case ErrorType.InvalidKey: ... case ErrorType.InvalidSignature: ... diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/default_handlers.py index e87f1ae..88e52dd 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/default_handlers.py @@ -154,7 +154,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): # Network handlers @KnowledgeHandler.create(HandlerType.Network, rid_types=[KoiNetNode]) -def coordinator_contact(ctx: HandlerContext, kobj: KnowledgeObject): +def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): """Makes contact with identified coordinator nodes. When an incoming node knowledge object is identified as a provider From 5ae8948e63f08bb0e07287b42fd86b580fd8f49e Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 30 Sep 2025 21:11:03 -0400 Subject: [PATCH 09/53] bug fixes, and decreased max wait time for demo --- src/koi_net/context.py | 2 +- src/koi_net/core.py | 3 ++- src/koi_net/lifecycle.py | 11 ++++++++--- src/koi_net/network/request_handler.py | 12 ++++-------- src/koi_net/processor/default_handlers.py | 2 +- src/koi_net/processor/event_worker.py | 2 +- 6 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/koi_net/context.py b/src/koi_net/context.py index af3a0e0..7f70806 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -28,10 +28,10 @@ class HandlerContext: config: NodeConfig cache: Cache event_queue: EventQueue + kobj_queue: KobjQueue graph: NetworkGraph request_handler: RequestHandler resolver: NetworkResolver - _processor: KobjQueue | None def __init__( self, diff --git a/src/koi_net/core.py b/src/koi_net/core.py index bf8a6a0..5ef4f52 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -191,7 +191,8 @@ def create(cls) -> NodeContainer: event_queue=event_queue, event_worker=event_worker, cache=cache, - handshaker=behaviors + handshaker=handshaker, + behaviors=behaviors ) server = cls.server( config=config, diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index e70cc31..1b3a8a1 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -4,6 +4,7 @@ from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode +from koi_net.behaviors import Behaviors from koi_net.handshaker import Handshaker from koi_net.kobj_worker import KnowledgeProcessingWorker from koi_net.models import END @@ -29,6 +30,8 @@ class NodeLifecycle: event_queue: EventQueue event_worker: EventProcessingWorker cache: Cache + handshaker: Handshaker + behaviors: Behaviors def __init__( self, @@ -40,7 +43,8 @@ def __init__( event_queue: EventQueue, event_worker: EventProcessingWorker, cache: Cache, - handshaker: Handshaker + handshaker: Handshaker, + behaviors: Behaviors ): self.config = config self.identity = identity @@ -51,6 +55,7 @@ def __init__( self.event_worker = event_worker self.cache = cache self.handshaker = handshaker + self.behaviors = behaviors @contextmanager def run(self): @@ -110,8 +115,8 @@ def 
start(self): - for coordinator in self.handshaker.identify_coordinators(): - self.handshaker.catch_up_with(coordinator, rid_types=[KoiNetNode]) + for coordinator in self.behaviors.identify_coordinators(): + self.behaviors.catch_up_with(coordinator, rid_types=[KoiNetNode]) def stop(self): diff --git a/src/koi_net/network/request_handler.py b/src/koi_net/network/request_handler.py index 3909be2..15294a9 100644 --- a/src/koi_net/network/request_handler.py +++ b/src/koi_net/network/request_handler.py @@ -28,10 +28,7 @@ ) from ..protocol.node import NodeProfile, NodeType from ..secure import Secure - -from typing import TYPE_CHECKING -if TYPE_CHECKING: - from .error_handler import ErrorHandler +from .error_handler import ErrorHandler logger = logging.getLogger(__name__) @@ -60,19 +57,18 @@ class RequestHandler: cache: Cache identity: NodeIdentity secure: Secure - error_handler: "ErrorHandler" + error_handler: ErrorHandler def __init__( self, cache: Cache, identity: NodeIdentity, - secure: Secure + secure: Secure, + error_handler: ErrorHandler ): self.cache = cache self.identity = identity self.secure = secure - - def set_error_handler(self, error_handler: "ErrorHandler"): self.error_handler = error_handler def get_url(self, node_rid: KoiNetNode) -> str: diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/default_handlers.py index 88e52dd..bd6c3f2 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/default_handlers.py @@ -295,4 +295,4 @@ def forget_edge_on_node_deletion(ctx: HandlerContext, kobj: KnowledgeObject): if kobj.rid in (edge_profile.source, edge_profile.target): logger.debug("Identified edge with forgotten node") - ctx.handle(rid=edge_rid, event_type=EventType.FORGET) \ No newline at end of file + ctx.kobj_queue.put_kobj(rid=edge_rid, event_type=EventType.FORGET) \ No newline at end of file diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index 237848e..063b992 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -31,7 +31,7 @@ def __init__( poll_event_buf: PollEventBuffer, queue_timeout: float = 0.1, max_buf_len: int = 5, - max_wait_time: float = 10.0 + max_wait_time: float = 1.0 ): self.event_queue = event_queue self.request_handler = request_handler From 42df538e35b7f0ec5993bc339cc40803b07f8ca0 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 30 Sep 2025 23:59:41 -0400 Subject: [PATCH 10/53] added better conditions for determining when no coordinator is known, event worker flushes buffers before shutting down now --- src/koi_net/lifecycle.py | 7 +++++-- src/koi_net/processor/event_worker.py | 4 +++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 1b3a8a1..8bb8f8e 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -108,8 +108,11 @@ def start(self): self.kobj_queue.q.join() # TODO: FACTOR OUT BEHAVIOR - if not self.graph.get_neighbors() and self.config.koi_net.first_contact.rid: - logger.debug(f"I don't have any neighbors, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") + + coordinators = self.graph.get_neighbors(direction="in", allowed_type=KoiNetNode) + + if len(coordinators) == 0 and self.config.koi_net.first_contact.rid: + logger.debug(f"I don't have any edges with coordinators, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) 
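A minimal sketch of the startup behavior introduced above (illustrative only, not part of the patch): the graph, handshaker, and behaviors arguments stand in for the real components and are assumed to expose the same methods used in the diff; the function name itself is hypothetical.

from rid_lib.types import KoiNetNode

def reach_out_on_startup(graph, handshaker, behaviors, first_contact_rid):
    # Only handshake with the configured first contact when the node has no
    # inbound edges from coordinator nodes yet.
    coordinators = graph.get_neighbors(direction="in", allowed_type=KoiNetNode)
    if not coordinators and first_contact_rid:
        handshaker.handshake_with(first_contact_rid)
    # Catch up on network state with every coordinator already known.
    for coordinator in behaviors.identify_coordinators():
        behaviors.catch_up_with(coordinator, rid_types=[KoiNetNode])
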
diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index 063b992..c27e368 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -87,7 +87,9 @@ def run(self): try: if item is END: - logger.info("Received 'END' signal, shutting down...") + logger.info("Received 'END' signal, flushing buffer...") + for target in self.event_buffer.keys(): + self.flush_buffer(target, self.event_buffer[target]) return logger.info(f"Dequeued {item.event!r} -> {item.target!r}") From 6a93b380236238ed97f694838b006879f9be90aa Mon Sep 17 00:00:00 2001 From: lukvmil Date: Mon, 6 Oct 2025 16:11:33 -0400 Subject: [PATCH 11/53] updated docs, bumped version to 1.2.0-beta.1 --- pyproject.toml | 2 +- src/koi_net/processor/default_handlers.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 932f141..ae325bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" -version = "1.1.0" +version = "1.2.0b1" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/default_handlers.py index bd6c3f2..6157855 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/default_handlers.py @@ -155,12 +155,12 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): @KnowledgeHandler.create(HandlerType.Network, rid_types=[KoiNetNode]) def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): - """Makes contact with identified coordinator nodes. + """Makes contact with providers of RID types of interest. When an incoming node knowledge object is identified as a provider - of `orn:koi-net.node`, and not already known to the node, this - handler will propose a new edge subscribing to future node events, - and fetch existing nodes to catch up to the current state. + of an RID type of interest, this handler will propose a new edge + subscribing to future node events, and fetch existing nodes to catch + up to the current state. 
""" # prevents nodes from attempting to form a self loop if kobj.rid == ctx.identity.rid: From 382355c2d8dd0f312e2688846458415af01d1453 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 7 Oct 2025 13:24:54 -0400 Subject: [PATCH 12/53] set up structlog, moving towards better logging practices --- pyproject.toml | 1 + src/koi_net/__init__.py | 3 +- src/koi_net/behaviors.py | 11 +- src/koi_net/effector.py | 22 +-- src/koi_net/handshaker.py | 6 +- src/koi_net/identity.py | 5 +- src/koi_net/kobj_worker.py | 10 +- src/koi_net/lifecycle.py | 24 ++-- src/koi_net/log.py | 152 ++++++++++++++++++++ src/koi_net/network/error_handler.py | 12 +- src/koi_net/network/event_queue.py | 4 +- src/koi_net/network/graph.py | 16 +-- src/koi_net/network/request_handler.py | 27 ++-- src/koi_net/network/resolver.py | 26 ++-- src/koi_net/network/response_handler.py | 10 +- src/koi_net/poller.py | 4 +- src/koi_net/processor/default_handlers.py | 49 +++---- src/koi_net/processor/event_worker.py | 12 +- src/koi_net/processor/knowledge_pipeline.py | 42 +++--- src/koi_net/processor/kobj_queue.py | 6 +- src/koi_net/protocol/envelope.py | 10 +- src/koi_net/protocol/secure.py | 16 +-- src/koi_net/secure.py | 10 +- src/koi_net/server.py | 12 +- uv.lock | 16 ++- 25 files changed, 335 insertions(+), 171 deletions(-) create mode 100644 src/koi_net/log.py diff --git a/pyproject.toml b/pyproject.toml index ae325bc..2c2152e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "fastapi>=0.115.12", "uvicorn>=0.34.2", "rich>=14.1.0", + "structlog>=25.4.0", ] [project.optional-dependencies] diff --git a/src/koi_net/__init__.py b/src/koi_net/__init__.py index db33c75..80782ee 100644 --- a/src/koi_net/__init__.py +++ b/src/koi_net/__init__.py @@ -1 +1,2 @@ -from .core import NodeContainer \ No newline at end of file +from .core import NodeContainer +from . 
import log \ No newline at end of file diff --git a/src/koi_net/behaviors.py b/src/koi_net/behaviors.py index 229316d..a639d4c 100644 --- a/src/koi_net/behaviors.py +++ b/src/koi_net/behaviors.py @@ -1,4 +1,4 @@ -from logging import getLogger +import structlog from rid_lib.ext import Cache from rid_lib.types import KoiNetNode from rid_lib import RIDType @@ -8,11 +8,8 @@ from koi_net.network.resolver import NetworkResolver from koi_net.processor.kobj_queue import KobjQueue from koi_net.protocol.api_models import ErrorResponse -from .protocol.event import Event, EventType - - -logger = getLogger(__name__) +log = structlog.stdlib.get_logger() class Behaviors: @@ -34,13 +31,13 @@ def catch_up_with(self, target: KoiNetNode, rid_types: list[RIDType] = []): target: Node to catch up with rid_types: RID types to fetch from target (all types if list is empty) """ - logger.debug(f"catching up with {target} on {rid_types or 'all types'}") + log.debug(f"catching up with {target} on {rid_types or 'all types'}") payload = self.request_handler.fetch_manifests( node=target, rid_types=rid_types ) if type(payload) == ErrorResponse: - logger.debug("failed to reach node") + log.debug("failed to reach node") return for manifest in payload.manifests: if manifest.rid == self.identity.rid: diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index 6e6dd87..c142724 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -1,4 +1,4 @@ -import logging +import structlog from typing import Callable from enum import StrEnum from rid_lib.ext import Cache, Bundle @@ -8,7 +8,7 @@ from .processor.kobj_queue import KobjQueue from .context import ActionContext -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class BundleSource(StrEnum): @@ -70,18 +70,18 @@ def _try_cache(self, rid: RID) -> tuple[Bundle, BundleSource] | None: bundle = self.cache.read(rid) if bundle: - logger.debug("Cache hit") + log.debug("Cache hit") return bundle, BundleSource.CACHE else: - logger.debug("Cache miss") + log.debug("Cache miss") return None def _try_action(self, rid: RID) -> tuple[Bundle, BundleSource] | None: if type(rid) not in self._action_table: - logger.debug("No action available") + log.debug("No action available") return None - logger.debug("Action available") + log.debug("Action available") func = self._action_table[type(rid)] bundle = func( ctx=self.action_context, @@ -89,10 +89,10 @@ def _try_action(self, rid: RID) -> tuple[Bundle, BundleSource] | None: ) if bundle: - logger.debug("Action hit") + log.debug("Action hit") return bundle, BundleSource.ACTION else: - logger.debug("Action miss") + log.debug("Action miss") return None @@ -100,10 +100,10 @@ def _try_network(self, rid: RID) -> tuple[Bundle, KoiNetNode] | None: bundle, source = self.resolver.fetch_remote_bundle(rid) if bundle: - logger.debug("Network hit") + log.debug("Network hit") return bundle, source else: - logger.debug("Network miss") + log.debug("Network miss") return None @@ -127,7 +127,7 @@ def deref( handle_result: handles resulting bundle with knowledge pipeline when `True` """ - logger.debug(f"Dereferencing {rid!r}") + log.debug(f"Dereferencing {rid!r}") bundle, source = ( # if `refresh_cache`, skip try cache diff --git a/src/koi_net/handshaker.py b/src/koi_net/handshaker.py index d1c99eb..be6bfe7 100644 --- a/src/koi_net/handshaker.py +++ b/src/koi_net/handshaker.py @@ -1,11 +1,11 @@ -from logging import getLogger +import structlog from rid_lib.ext import Cache from rid_lib.types import KoiNetNode from 
koi_net.identity import NodeIdentity from koi_net.network.event_queue import EventQueue from .protocol.event import Event, EventType -logger = getLogger(__name__) +log = structlog.stdlib.get_logger() class Handshaker: @@ -24,7 +24,7 @@ def handshake_with(self, target: KoiNetNode): Pushes successive `FORGET` and `NEW` events to target node to reset the target's cache in case it already knew this node. """ - logger.debug(f"Initiating handshake with {target}") + log.debug(f"Initiating handshake with {target}") self.event_queue.push_event_to( Event.from_rid( event_type=EventType.FORGET, diff --git a/src/koi_net/identity.py b/src/koi_net/identity.py index 449c52b..0a2d5bf 100644 --- a/src/koi_net/identity.py +++ b/src/koi_net/identity.py @@ -1,10 +1,9 @@ -import logging +import structlog from rid_lib.types.koi_net_node import KoiNetNode from .config import NodeConfig from .protocol.node import NodeProfile - -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class NodeIdentity: diff --git a/src/koi_net/kobj_worker.py b/src/koi_net/kobj_worker.py index f52a2fd..dac4dee 100644 --- a/src/koi_net/kobj_worker.py +++ b/src/koi_net/kobj_worker.py @@ -1,13 +1,13 @@ import queue import traceback -import logging +import structlog from koi_net.models import END from koi_net.processor.knowledge_pipeline import KnowledgePipeline from koi_net.processor.kobj_queue import KobjQueue from koi_net.worker import ThreadWorker -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class KnowledgeProcessingWorker(ThreadWorker): @@ -23,16 +23,16 @@ def __init__( super().__init__() def run(self): - logger.info("Started kobj worker") + log.info("Started kobj worker") while True: try: item = self.kobj_queue.q.get(timeout=self.timeout) try: if item is END: - logger.info("Received 'END' signal, shutting down...") + log.info("Received 'END' signal, shutting down...") return - logger.info(f"Dequeued {item!r}") + log.info(f"Dequeued {item!r}") self.pipeline.process(item) finally: diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 8bb8f8e..323e5c9 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -1,4 +1,4 @@ -import logging +import structlog from contextlib import contextmanager, asynccontextmanager from rid_lib.ext import Bundle, Cache @@ -16,7 +16,7 @@ from .network.graph import NetworkGraph from .identity import NodeIdentity -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class NodeLifecycle: @@ -61,26 +61,26 @@ def __init__( def run(self): """Synchronous context manager for node startup and shutdown.""" try: - logger.info("Starting node lifecycle...") + log.info("Starting node lifecycle...") self.start() yield except KeyboardInterrupt: - logger.info("Keyboard interrupt!") + log.info("Keyboard interrupt!") finally: - logger.info("Stopping node lifecycle...") + log.info("Stopping node lifecycle...") self.stop() @asynccontextmanager async def async_run(self): """Asynchronous context manager for node startup and shutdown.""" try: - logger.info("Starting async node lifecycle...") + log.info("Starting async node lifecycle...") self.start() yield except KeyboardInterrupt: - logger.info("Keyboard interrupt!") + log.info("Keyboard interrupt!") finally: - logger.info("Stopping async node lifecycle...") + log.info("Stopping async node lifecycle...") self.stop() def start(self): @@ -91,7 +91,7 @@ def start(self): of node bundle. Initiates handshake with first contact if node doesn't have any neighbors. 
Catches up with coordinator state. """ - logger.info("Starting processor worker thread") + log.info("Starting processor worker thread") self.kobj_worker.thread.start() self.event_worker.thread.start() @@ -104,7 +104,7 @@ def start(self): contents=self.identity.profile.model_dump() )) - logger.debug("Waiting for kobj queue to empty") + log.debug("Waiting for kobj queue to empty") self.kobj_queue.q.join() # TODO: FACTOR OUT BEHAVIOR @@ -112,7 +112,7 @@ def start(self): coordinators = self.graph.get_neighbors(direction="in", allowed_type=KoiNetNode) if len(coordinators) == 0 and self.config.koi_net.first_contact.rid: - logger.debug(f"I don't have any edges with coordinators, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") + log.debug(f"I don't have any edges with coordinators, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) @@ -127,7 +127,7 @@ def stop(self): Finishes processing knowledge object queue. """ - logger.info(f"Waiting for kobj queue to empty ({self.kobj_queue.q.unfinished_tasks} tasks remaining)") + log.info(f"Waiting for kobj queue to empty ({self.kobj_queue.q.unfinished_tasks} tasks remaining)") self.kobj_queue.q.put(END) self.event_queue.q.put(END) \ No newline at end of file diff --git a/src/koi_net/log.py b/src/koi_net/log.py new file mode 100644 index 0000000..e063551 --- /dev/null +++ b/src/koi_net/log.py @@ -0,0 +1,152 @@ +from datetime import datetime +import logging +from logging.handlers import RotatingFileHandler +import colorama +import structlog +import sys + + +def my_processor(_, __, event: dict): + # print(_, __, event) + event["path"] = event["logger"] + "." + event.pop("func_name") + return event + + +console_renderer = structlog.dev.ConsoleRenderer( + columns=[ + # Render the timestamp without the key name in yellow. + structlog.dev.Column( + "timestamp", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Style.DIM, + reset_style=colorama.Style.RESET_ALL, + value_repr=lambda t: datetime.fromisoformat(t).strftime("%Y-%m-%d %H:%M:%S"), + ), + ), + structlog.dev.Column( + "level", + structlog.dev.LogLevelColumnFormatter( + level_styles={ + level: colorama.Style.BRIGHT + color + for level, color in { + "critical": colorama.Fore.RED, + "exception": colorama.Fore.RED, + "error": colorama.Fore.RED, + "warn": colorama.Fore.YELLOW, + "warning": colorama.Fore.YELLOW, + "info": colorama.Fore.GREEN, + "debug": colorama.Fore.GREEN, + "notset": colorama.Back.RED, + }.items() + }, + reset_style=colorama.Style.RESET_ALL, + width=9 + ) + ), + # Render the event without the key name in bright magenta. + structlog.dev.Column( + "event", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Fore.WHITE, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + width=30 + ), + ), + # Default formatter for all keys not explicitly mentioned. The key is + # cyan, the value is green. 
+ structlog.dev.Column( + "path", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Fore.MAGENTA, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + postfix=":" + ), + ), + # structlog.dev.Column( + # "func_name", + # structlog.dev.KeyValueColumnFormatter( + # key_style=None, + # value_style=colorama.Fore.MAGENTA, + # reset_style=colorama.Style.RESET_ALL, + # value_repr=str, + # ), + # ), + structlog.dev.Column( + "", + structlog.dev.KeyValueColumnFormatter( + key_style=colorama.Fore.BLUE, + value_style=colorama.Fore.GREEN, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + ), + ) + ] +) + +structlog.configure( + processors=[ + # If log level is too low, abort pipeline and throw away log entry. + structlog.stdlib.filter_by_level, + # Add the name of the logger to event dict. + structlog.stdlib.add_logger_name, + # Add log level to event dict. + structlog.stdlib.add_log_level, + # Perform %-style formatting. + structlog.stdlib.PositionalArgumentsFormatter(), + # Add a timestamp in ISO 8601 format. + structlog.processors.TimeStamper(fmt="iso"), + # If the "stack_info" key in the event dict is true, remove it and + # render the current stack trace in the "stack" key. + structlog.processors.StackInfoRenderer(), + # If the "exc_info" key in the event dict is either true or a + # sys.exc_info() tuple, remove "exc_info" and render the exception + # with traceback into the "exception" key. + # structlog.processors.format_exc_info, + # If some value is in bytes, decode it to a Unicode str. + structlog.processors.UnicodeDecoder(), + # Add callsite parameters. + structlog.processors.CallsiteParameterAdder( + { + structlog.processors.CallsiteParameter.MODULE, + structlog.processors.CallsiteParameter.FUNC_NAME, + # structlog.processors.CallsiteParameter.LINENO, + } + ), + my_processor, + # Render the final event dict as JSON. + console_renderer + # structlog.processors.JSONRenderer() + + ], + # `wrapper_class` is the bound logger that you get back from + # get_logger(). This one imitates the API of `logging.Logger`. + wrapper_class=structlog.stdlib.BoundLogger, + # `logger_factory` is used to create wrapped loggers that are used for + # OUTPUT. This one returns a `logging.Logger`. The final value (a JSON + # string) from the final processor (`JSONRenderer`) will be passed to + # the method of the same name as that you've called on the bound logger. + logger_factory=structlog.stdlib.LoggerFactory(), + # Effectively freeze configuration after creating the first bound + # logger. 
+ cache_logger_on_first_use=True, +) + +file_handler = RotatingFileHandler( + filename="app.log", + maxBytes=10 * 1024 * 1024, + backupCount=5, + encoding="utf-8" +) + +logging.basicConfig( + format="%(message)s", + stream=sys.stdout, + level=logging.INFO, +) + +# log = structlog.stdlib.get_logger() \ No newline at end of file diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index 8ee6926..0e7fd47 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -1,11 +1,11 @@ -from logging import getLogger +import structlog from koi_net.handshaker import Handshaker from koi_net.protocol.errors import ErrorType from koi_net.protocol.event import EventType from rid_lib.types import KoiNetNode from ..processor.kobj_queue import KobjQueue -logger = getLogger(__name__) +log = structlog.stdlib.get_logger() class ErrorHandler: @@ -27,10 +27,10 @@ def handle_connection_error(self, node: KoiNetNode): self.timeout_counter.setdefault(node, 0) self.timeout_counter[node] += 1 - logger.debug(f"{node} has timed out {self.timeout_counter[node]} time(s)") + log.debug(f"{node} has timed out {self.timeout_counter[node]} time(s)") if self.timeout_counter[node] > 3: - logger.debug(f"Exceeded time out limit, forgetting node") + log.debug(f"Exceeded time out limit, forgetting node") self.kobj_queue.put_kobj(rid=node, event_type=EventType.FORGET) # do something @@ -41,10 +41,10 @@ def handle_protocol_error( node: KoiNetNode ): """Attempts handshake when this node is unknown to target.""" - logger.info(f"Handling protocol error {error_type} for node {node!r}") + log.info(f"Handling protocol error {error_type} for node {node!r}") match error_type: case ErrorType.UnknownNode: - logger.info("Peer doesn't know me, attempting handshake...") + log.info("Peer doesn't know me, attempting handshake...") self.handshaker.handshake_with(node) case ErrorType.InvalidKey: ... 
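For context, a brief sketch of the logging convention this commit moves the modules toward (assuming only standard structlog behavior; the example function is illustrative): each module replaces logging.getLogger(__name__) with a module-level structlog logger, and key-value context may be passed alongside the message, rendered according to the configuration in src/koi_net/log.py.

import structlog

log = structlog.stdlib.get_logger()

def resolve_example(node_rid: str):
    # structlog accepts key-value context in addition to the message string
    log.debug("resolving node URL", node=node_rid)
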
diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index cdd5aeb..0578643 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -1,4 +1,4 @@ -import logging +import structlog from queue import Queue from rid_lib.types import KoiNetNode @@ -6,7 +6,7 @@ from ..models import QueuedEvent from ..protocol.event import Event -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class EventQueue: diff --git a/src/koi_net/network/graph.py b/src/koi_net/network/graph.py index 24152ac..0dac50d 100644 --- a/src/koi_net/network/graph.py +++ b/src/koi_net/network/graph.py @@ -1,4 +1,4 @@ -import logging +import structlog from typing import Literal import networkx as nx from rid_lib import RIDType @@ -7,7 +7,7 @@ from ..identity import NodeIdentity from ..protocol.edge import EdgeProfile, EdgeStatus -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class NetworkGraph: @@ -24,22 +24,22 @@ def __init__(self, cache: Cache, identity: NodeIdentity): def generate(self): """Generates directed graph from cached KOI nodes and edges.""" - logger.debug("Generating network graph") + log.debug("Generating network graph") self.dg.clear() for rid in self.cache.list_rids(): if type(rid) == KoiNetNode: self.dg.add_node(rid) - logger.debug(f"Added node {rid!r}") + log.debug(f"Added node {rid!r}") elif type(rid) == KoiNetEdge: edge_bundle = self.cache.read(rid) if not edge_bundle: - logger.warning(f"Failed to load {rid!r}") + log.warning(f"Failed to load {rid!r}") continue edge_profile = edge_bundle.validate_contents(EdgeProfile) self.dg.add_edge(edge_profile.source, edge_profile.target, rid=rid) - logger.debug(f"Added edge {rid!r} ({edge_profile.source} -> {edge_profile.target})") - logger.debug("Done") + log.debug(f"Added edge {rid!r} ({edge_profile.source} -> {edge_profile.target})") + log.debug("Done") def get_edge(self, source: KoiNetNode, target: KoiNetNode,) -> KoiNetEdge | None: """Returns edge RID given the RIDs of a source and target node.""" @@ -97,7 +97,7 @@ def get_neighbors( edge_bundle = self.cache.read(edge_rid) if not edge_bundle: - logger.warning(f"Failed to find edge {edge_rid!r} in cache") + log.warning(f"Failed to find edge {edge_rid!r} in cache") continue edge_profile = edge_bundle.validate_contents(EdgeProfile) diff --git a/src/koi_net/network/request_handler.py b/src/koi_net/network/request_handler.py index 15294a9..6042680 100644 --- a/src/koi_net/network/request_handler.py +++ b/src/koi_net/network/request_handler.py @@ -1,4 +1,4 @@ -import logging +import structlog import httpx from rid_lib import RID from rid_lib.ext import Cache @@ -30,8 +30,7 @@ from ..secure import Secure from .error_handler import ErrorHandler - -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() # Custom error types for request handling @@ -74,7 +73,7 @@ def __init__( def get_url(self, node_rid: KoiNetNode) -> str: """Retrieves URL of a node from its RID.""" - logger.debug(f"Getting URL for {node_rid!r}") + log.debug(f"Getting URL for {node_rid!r}") node_url = None if node_rid == self.identity.rid: @@ -84,20 +83,20 @@ def get_url(self, node_rid: KoiNetNode) -> str: if node_bundle: node_profile = node_bundle.validate_contents(NodeProfile) - logger.debug(f"Found node profile: {node_profile}") + log.debug(f"Found node profile: {node_profile}") if node_profile.node_type != NodeType.FULL: raise PartialNodeQueryError("Can't query partial node") node_url = node_profile.base_url else: if 
node_rid == self.identity.config.koi_net.first_contact.rid: - logger.debug("Found URL of first contact") + log.debug("Found URL of first contact") node_url = self.identity.config.koi_net.first_contact.url if not node_url: raise NodeNotFoundError("Node not found") - logger.debug(f"Resolved {node_rid!r} to {node_url}") + log.debug(f"Resolved {node_rid!r} to {node_url}") return node_url def make_request( @@ -108,7 +107,7 @@ def make_request( ) -> ResponseModels | None: """Makes a request to a node.""" url = self.get_url(node) + path - logger.info(f"Making request to {url}") + log.info(f"Making request to {url}") signed_envelope = self.secure.create_envelope( payload=request, @@ -118,7 +117,7 @@ def make_request( try: result = httpx.post(url, data=signed_envelope.model_dump_json(exclude_none=True)) except httpx.ConnectError as err: - logger.debug("Failed to connect") + log.debug("Failed to connect") self.error_handler.handle_connection_error(node) raise err @@ -157,7 +156,7 @@ def broadcast_events( """ request = req or EventsPayload.model_validate(kwargs) self.make_request(node, BROADCAST_EVENTS_PATH, request) - logger.info(f"Broadcasted {len(request.events)} event(s) to {node!r}") + log.info(f"Broadcasted {len(request.events)} event(s) to {node!r}") def poll_events( self, @@ -172,7 +171,7 @@ def poll_events( request = req or PollEvents.model_validate(kwargs) resp = self.make_request(node, POLL_EVENTS_PATH, request) if type(resp) != ErrorResponse: - logger.info(f"Polled {len(resp.events)} events from {node!r}") + log.info(f"Polled {len(resp.events)} events from {node!r}") return resp def fetch_rids( @@ -188,7 +187,7 @@ def fetch_rids( request = req or FetchRids.model_validate(kwargs) resp = self.make_request(node, FETCH_RIDS_PATH, request) if type(resp) != ErrorResponse: - logger.info(f"Fetched {len(resp.rids)} RID(s) from {node!r}") + log.info(f"Fetched {len(resp.rids)} RID(s) from {node!r}") return resp def fetch_manifests( @@ -204,7 +203,7 @@ def fetch_manifests( request = req or FetchManifests.model_validate(kwargs) resp = self.make_request(node, FETCH_MANIFESTS_PATH, request) if type(resp) != ErrorResponse: - logger.info(f"Fetched {len(resp.manifests)} manifest(s) from {node!r}") + log.info(f"Fetched {len(resp.manifests)} manifest(s) from {node!r}") return resp def fetch_bundles( @@ -220,5 +219,5 @@ def fetch_bundles( request = req or FetchBundles.model_validate(kwargs) resp = self.make_request(node, FETCH_BUNDLES_PATH, request) if type(resp) != ErrorResponse: - logger.info(f"Fetched {len(resp.bundles)} bundle(s) from {node!r}") + log.info(f"Fetched {len(resp.bundles)} bundle(s) from {node!r}") return resp \ No newline at end of file diff --git a/src/koi_net/network/resolver.py b/src/koi_net/network/resolver.py index d80635a..9264123 100644 --- a/src/koi_net/network/resolver.py +++ b/src/koi_net/network/resolver.py @@ -1,4 +1,4 @@ -import logging +import structlog import httpx from rid_lib import RID from rid_lib.core import RIDType @@ -13,7 +13,7 @@ from ..identity import NodeIdentity from ..config import NodeConfig -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class NetworkResolver: @@ -45,7 +45,7 @@ def __init__( def get_state_providers(self, rid_type: RIDType) -> list[KoiNetNode]: """Returns list of node RIDs which provide state for specified RID type.""" - logger.debug(f"Looking for state providers of {rid_type}") + log.debug(f"Looking for state providers of {rid_type}") provider_nodes = [] for node_rid in self.cache.list_rids(rid_types=[KoiNetNode]): if 
node_rid == self.identity.rid: @@ -56,17 +56,17 @@ def get_state_providers(self, rid_type: RIDType) -> list[KoiNetNode]: node_profile = node_bundle.validate_contents(NodeProfile) if (node_profile.node_type == NodeType.FULL) and (rid_type in node_profile.provides.state): - logger.debug(f"Found provider {node_rid!r}") + log.debug(f"Found provider {node_rid!r}") provider_nodes.append(node_rid) if not provider_nodes: - logger.debug("Failed to find providers") + log.debug("Failed to find providers") return provider_nodes def fetch_remote_bundle(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | None]: """Attempts to fetch a bundle by RID from known peer nodes.""" - logger.debug(f"Fetching remote bundle {rid!r}") + log.debug(f"Fetching remote bundle {rid!r}") remote_bundle, node_rid = None, None for node_rid in self.get_state_providers(type(rid)): payload = self.request_handler.fetch_bundles( @@ -74,18 +74,18 @@ def fetch_remote_bundle(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | Non if payload.bundles: remote_bundle = payload.bundles[0] - logger.debug(f"Got bundle from {node_rid!r}") + log.debug(f"Got bundle from {node_rid!r}") break if not remote_bundle: - logger.warning("Failed to fetch remote bundle") + log.warning("Failed to fetch remote bundle") return remote_bundle, node_rid def fetch_remote_manifest(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | None]: """Attempts to fetch a manifest by RID from known peer nodes.""" - logger.debug(f"Fetching remote manifest {rid!r}") + log.debug(f"Fetching remote manifest {rid!r}") remote_manifest, node_rid = None, None for node_rid in self.get_state_providers(type(rid)): payload = self.request_handler.fetch_manifests( @@ -93,11 +93,11 @@ def fetch_remote_manifest(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | N if payload.manifests: remote_manifest = payload.manifests[0] - logger.debug(f"Got bundle from {node_rid!r}") + log.debug(f"Got bundle from {node_rid!r}") break if not remote_manifest: - logger.warning("Failed to fetch remote bundle") + log.warning("Failed to fetch remote bundle") return remote_manifest, node_rid @@ -136,12 +136,12 @@ def poll_neighbors(self) -> dict[KoiNetNode, list[Event]]: continue if payload.events: - logger.debug(f"Received {len(payload.events)} events from {node_rid!r}") + log.debug(f"Received {len(payload.events)} events from {node_rid!r}") event_dict[node_rid] = payload.events except httpx.ConnectError: - logger.debug(f"Failed to reach node {node_rid!r}") + log.debug(f"Failed to reach node {node_rid!r}") continue return event_dict \ No newline at end of file diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index 702cddf..c1722fd 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -1,4 +1,4 @@ -import logging +import structlog from rid_lib import RID from rid_lib.types import KoiNetNode from rid_lib.ext import Manifest, Cache @@ -13,7 +13,7 @@ FetchBundles, ) -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class ResponseHandler: @@ -29,14 +29,14 @@ def __init__( def fetch_rids(self, req: FetchRids, source: KoiNetNode) -> RidsPayload: """Returns response to fetch RIDs request.""" - logger.info(f"Request to fetch rids, allowed types {req.rid_types}") + log.info(f"Request to fetch rids, allowed types {req.rid_types}") rids = self.cache.list_rids(req.rid_types) return RidsPayload(rids=rids) def fetch_manifests(self, req: FetchManifests, source: KoiNetNode) -> ManifestsPayload: 
"""Returns response to fetch manifests request.""" - logger.info(f"Request to fetch manifests, allowed types {req.rid_types}, rids {req.rids}") + log.info(f"Request to fetch manifests, allowed types {req.rid_types}, rids {req.rids}") manifests: list[Manifest] = [] not_found: list[RID] = [] @@ -52,7 +52,7 @@ def fetch_manifests(self, req: FetchManifests, source: KoiNetNode) -> ManifestsP def fetch_bundles(self, req: FetchBundles, source: KoiNetNode) -> BundlesPayload: """Returns response to fetch bundles request.""" - logger.info(f"Request to fetch bundles, requested rids {req.rids}") + log.info(f"Request to fetch bundles, requested rids {req.rids}") bundles: list[Bundle] = [] not_found: list[RID] = [] diff --git a/src/koi_net/poller.py b/src/koi_net/poller.py index eab03aa..258cf8d 100644 --- a/src/koi_net/poller.py +++ b/src/koi_net/poller.py @@ -1,12 +1,12 @@ import time -import logging +import structlog from .processor.kobj_queue import KobjQueue from .lifecycle import NodeLifecycle from .network.resolver import NetworkResolver from .config import NodeConfig -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class NodePoller: diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/default_handlers.py index 6157855..2ff0cfb 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/default_handlers.py @@ -1,6 +1,6 @@ """Implementation of default knowledge handlers.""" -import logging +import structlog from rid_lib.ext import Bundle from rid_lib.ext.utils import sha256_hash from rid_lib.types import KoiNetNode, KoiNetEdge @@ -12,7 +12,8 @@ from ..protocol.edge import EdgeProfile, EdgeStatus, EdgeType, generate_edge_bundle from ..protocol.node import NodeProfile -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() + # RID handlers @@ -24,7 +25,7 @@ def basic_rid_handler(ctx: HandlerContext, kobj: KnowledgeObject): RID is known to this node. 
""" if (kobj.rid == ctx.identity.rid and kobj.source): - logger.debug("Don't let anyone else tell me who I am!") + log.debug("Don't let anyone else tell me who I am!") return STOP_CHAIN if kobj.event_type == EventType.FORGET: @@ -45,17 +46,17 @@ def basic_manifest_handler(ctx: HandlerContext, kobj: KnowledgeObject): if prev_bundle: if kobj.manifest.sha256_hash == prev_bundle.manifest.sha256_hash: - logger.debug("Hash of incoming manifest is same as existing knowledge, ignoring") + log.debug("Hash of incoming manifest is same as existing knowledge, ignoring") return STOP_CHAIN if kobj.manifest.timestamp <= prev_bundle.manifest.timestamp: - logger.debug("Timestamp of incoming manifest is the same or older than existing knowledge, ignoring") + log.debug("Timestamp of incoming manifest is the same or older than existing knowledge, ignoring") return STOP_CHAIN - logger.debug("RID previously known to me, labeling as 'UPDATE'") + log.debug("RID previously known to me, labeling as 'UPDATE'") kobj.normalized_event_type = EventType.UPDATE else: - logger.debug("RID previously unknown to me, labeling as 'NEW'") + log.debug("RID previously unknown to me, labeling as 'NEW'") kobj.normalized_event_type = EventType.NEW return kobj @@ -78,7 +79,7 @@ def secure_profile_handler(ctx: HandlerContext, kobj: KnowledgeObject): node_rid: KoiNetNode = kobj.rid if sha256_hash(node_profile.public_key) != node_rid.hash: - logger.warning(f"Public key hash mismatch for {node_rid!r}!") + log.warning(f"Public key hash mismatch for {node_rid!r}!") return STOP_CHAIN @KnowledgeHandler.create( @@ -104,13 +105,13 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): if edge_profile.status != EdgeStatus.PROPOSED: return - logger.debug("Handling edge negotiation") + log.debug("Handling edge negotiation") peer_rid = edge_profile.target peer_bundle = ctx.cache.read(peer_rid) if not peer_bundle: - logger.warning(f"Peer {peer_rid!r} unknown to me") + log.warning(f"Peer {peer_rid!r} unknown to me") return STOP_CHAIN peer_profile = peer_bundle.validate_contents(NodeProfile) @@ -125,11 +126,11 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): abort = False if (edge_profile.edge_type == EdgeType.WEBHOOK and peer_profile.node_type == NodeType.PARTIAL): - logger.debug("Partial nodes cannot use webhooks") + log.debug("Partial nodes cannot use webhooks") abort = True if not set(edge_profile.rid_types).issubset(provided_events): - logger.debug("Requested RID types not provided by this node") + log.debug("Requested RID types not provided by this node") abort = True if abort: @@ -139,7 +140,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): else: # approve edge profile - logger.debug("Approving proposed edge") + log.debug("Approving proposed edge") edge_profile.status = EdgeStatus.APPROVED updated_bundle = Bundle.generate(kobj.rid, edge_profile.model_dump()) @@ -148,7 +149,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): elif edge_profile.target == ctx.identity.rid: if edge_profile.status == EdgeStatus.APPROVED: - logger.debug("Edge approved by other node!") + log.debug("Edge approved by other node!") # Network handlers @@ -176,8 +177,8 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): if not available_rid_types: return - logger.info("Identified a coordinator!") - logger.info("Proposing new edge") + log.info("Identified a coordinator!") + log.info("Proposing new edge") # already have an edge established edge_rid = 
ctx.graph.get_edge( @@ -217,7 +218,7 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): edge_bundle = Bundle.generate(edge_rid, edge_profile.model_dump()) ctx.kobj_queue.put_kobj(bundle=edge_bundle) - logger.info("Catching up on network state") + log.info("Catching up on network state") payload = ctx.request_handler.fetch_rids( node=kobj.rid, @@ -225,16 +226,16 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): ) for rid in payload.rids: if rid == ctx.identity.rid: - logger.info("Skipping myself") + log.info("Skipping myself") continue if ctx.cache.exists(rid): - logger.info(f"Skipping known RID {rid!r}") + log.info(f"Skipping known RID {rid!r}") continue # marked as external since we are handling RIDs from another node # will fetch remotely instead of checking local cache ctx.kobj_queue.put_kobj(rid=rid, source=kobj.rid) - logger.info("Done") + log.info("Done") @KnowledgeHandler.create(HandlerType.Network) @@ -260,12 +261,12 @@ def basic_network_output_filter(ctx: HandlerContext, kobj: KnowledgeObject): edge_profile = kobj.bundle.validate_contents(EdgeProfile) if edge_profile.source == ctx.identity.rid: - logger.debug(f"Adding edge target '{edge_profile.target!r}' to network targets") + log.debug(f"Adding edge target '{edge_profile.target!r}' to network targets") kobj.network_targets.update([edge_profile.target]) involves_me = True elif edge_profile.target == ctx.identity.rid: - logger.debug(f"Adding edge source '{edge_profile.source!r}' to network targets") + log.debug(f"Adding edge source '{edge_profile.source!r}' to network targets") kobj.network_targets.update([edge_profile.source]) involves_me = True @@ -276,7 +277,7 @@ def basic_network_output_filter(ctx: HandlerContext, kobj: KnowledgeObject): allowed_type=type(kobj.rid) ) - logger.debug(f"Updating network targets with '{type(kobj.rid)}' subscribers: {subscribers}") + log.debug(f"Updating network targets with '{type(kobj.rid)}' subscribers: {subscribers}") kobj.network_targets.update(subscribers) return kobj @@ -294,5 +295,5 @@ def forget_edge_on_node_deletion(ctx: HandlerContext, kobj: KnowledgeObject): edge_profile = edge_bundle.validate_contents(EdgeProfile) if kobj.rid in (edge_profile.source, edge_profile.target): - logger.debug("Identified edge with forgotten node") + log.debug("Identified edge with forgotten node") ctx.kobj_queue.put_kobj(rid=edge_rid, event_type=EventType.FORGET) \ No newline at end of file diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index c27e368..1cf405b 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -1,7 +1,7 @@ import queue import traceback import time -import logging +import structlog from rid_lib.ext import Cache from rid_lib.types import KoiNetNode @@ -15,7 +15,7 @@ from koi_net.protocol.node import NodeProfile, NodeType from koi_net.worker import ThreadWorker -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class EventProcessingWorker(ThreadWorker): @@ -74,12 +74,12 @@ def decide_event(self, item: QueuedEvent) -> bool: return True else: - logger.warning(f"Couldn't handle event {item.event!r} in queue, node {item.target!r} unknown to me") + log.warning(f"Couldn't handle event {item.event!r} in queue, node {item.target!r} unknown to me") return False def run(self): - logger.info("Started event worker") + log.info("Started event worker") while True: now = time.time() try: @@ -87,12 +87,12 @@ def run(self): try: if item is END: - 
logger.info("Received 'END' signal, flushing buffer...") + log.info("Received 'END' signal, flushing buffer...") for target in self.event_buffer.keys(): self.flush_buffer(target, self.event_buffer[target]) return - logger.info(f"Dequeued {item.event!r} -> {item.target!r}") + log.info(f"Dequeued {item.event!r} -> {item.target!r}") if not self.decide_event(item): continue diff --git a/src/koi_net/processor/knowledge_pipeline.py b/src/koi_net/processor/knowledge_pipeline.py index e107f31..7076591 100644 --- a/src/koi_net/processor/knowledge_pipeline.py +++ b/src/koi_net/processor/knowledge_pipeline.py @@ -1,4 +1,4 @@ -import logging +import structlog from rid_lib.types import KoiNetEdge, KoiNetNode from rid_lib.ext import Cache from ..protocol.event import EventType @@ -18,7 +18,7 @@ if TYPE_CHECKING: from ..context import HandlerContext -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class KnowledgePipeline: @@ -71,7 +71,7 @@ def call_handler_chain( if handler.event_types and kobj.event_type not in handler.event_types: continue - logger.debug(f"Calling {handler_type} handler '{handler.func.__name__}'") + log.debug(f"Calling {handler_type} handler '{handler.func.__name__}'") resp = handler.func( ctx=self.handler_context, @@ -80,7 +80,7 @@ def call_handler_chain( # stops handler chain execution if resp is STOP_CHAIN: - logger.debug(f"Handler chain stopped by {handler.func.__name__}") + log.debug(f"Handler chain stopped by {handler.func.__name__}") return STOP_CHAIN # kobj unmodified elif resp is None: @@ -88,7 +88,7 @@ def call_handler_chain( # kobj modified by handler elif isinstance(resp, KnowledgeObject): kobj = resp - logger.debug(f"Knowledge object modified by {handler.func.__name__}") + log.debug(f"Knowledge object modified by {handler.func.__name__}") else: raise ValueError(f"Handler {handler.func.__name__} returned invalid response '{resp}'") @@ -107,36 +107,36 @@ def process(self, kobj: KnowledgeObject): The pipeline may be stopped by any point by a single handler returning the `STOP_CHAIN` sentinel. In that case, the process will exit immediately. Further handlers of that type and later handler chains will not be called. 
""" - logger.debug(f"Handling {kobj!r}") + log.debug(f"Handling {kobj!r}") kobj = self.call_handler_chain(HandlerType.RID, kobj) if kobj is STOP_CHAIN: return if kobj.event_type == EventType.FORGET: bundle = self.cache.read(kobj.rid) if not bundle: - logger.debug("Local bundle not found") + log.debug("Local bundle not found") return # the bundle (to be deleted) attached to kobj for downstream analysis - logger.debug("Adding local bundle (to be deleted) to knowledge object") + log.debug("Adding local bundle (to be deleted) to knowledge object") kobj.manifest = bundle.manifest kobj.contents = bundle.contents else: # attempt to retrieve manifest if not kobj.manifest: - logger.debug("Manifest not found") + log.debug("Manifest not found") if not kobj.source: return - logger.debug("Attempting to fetch remote manifest from source") + log.debug("Attempting to fetch remote manifest from source") payload = self.request_handler.fetch_manifests( node=kobj.source, rids=[kobj.rid] ) if not payload.manifests: - logger.debug("Failed to find manifest") + log.debug("Failed to find manifest") return kobj.manifest = payload.manifests[0] @@ -146,24 +146,24 @@ def process(self, kobj: KnowledgeObject): # attempt to retrieve bundle if not kobj.bundle: - logger.debug("Bundle not found") + log.debug("Bundle not found") if kobj.source is None: return - logger.debug("Attempting to fetch remote bundle from source") + log.debug("Attempting to fetch remote bundle from source") payload = self.request_handler.fetch_bundles( node=kobj.source, rids=[kobj.rid] ) if not payload.bundles: - logger.debug("Failed to find bundle") + log.debug("Failed to find bundle") return bundle = payload.bundles[0] if kobj.manifest != bundle.manifest: - logger.warning("Retrieved bundle contains a different manifest") + log.warning("Retrieved bundle contains a different manifest") kobj.manifest = bundle.manifest kobj.contents = bundle.contents @@ -172,28 +172,28 @@ def process(self, kobj: KnowledgeObject): if kobj is STOP_CHAIN: return if kobj.normalized_event_type in (EventType.UPDATE, EventType.NEW): - logger.info(f"Writing to cache: {kobj!r}") + log.info(f"Writing to cache: {kobj!r}") self.cache.write(kobj.bundle) elif kobj.normalized_event_type == EventType.FORGET: - logger.info(f"Deleting from cache: {kobj!r}") + log.info(f"Deleting from cache: {kobj!r}") self.cache.delete(kobj.rid) else: - logger.debug("Normalized event type was never set, no cache or network operations will occur") + log.debug("Normalized event type was never set, no cache or network operations will occur") return if type(kobj.rid) in (KoiNetNode, KoiNetEdge): - logger.debug("Change to node or edge, regenerating network graph") + log.debug("Change to node or edge, regenerating network graph") self.graph.generate() kobj = self.call_handler_chain(HandlerType.Network, kobj) if kobj is STOP_CHAIN: return if kobj.network_targets: - logger.debug(f"Broadcasting event to {len(kobj.network_targets)} network target(s)") + log.debug(f"Broadcasting event to {len(kobj.network_targets)} network target(s)") else: - logger.debug("No network targets set") + log.debug("No network targets set") for node in kobj.network_targets: self.event_queue.push_event_to(kobj.normalized_event, node) diff --git a/src/koi_net/processor/kobj_queue.py b/src/koi_net/processor/kobj_queue.py index be6ba10..1ed55b6 100644 --- a/src/koi_net/processor/kobj_queue.py +++ b/src/koi_net/processor/kobj_queue.py @@ -1,4 +1,4 @@ -import logging +import structlog from queue import Queue from rid_lib.core import RID from 
rid_lib.ext import Bundle, Manifest @@ -6,7 +6,7 @@ from ..protocol.event import Event, EventType from .knowledge_object import KnowledgeObject -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class KobjQueue: @@ -48,4 +48,4 @@ def put_kobj( raise ValueError("One of 'rid', 'manifest', 'bundle', 'event', or 'kobj' must be provided") self.q.put(_kobj) - logger.debug(f"Queued {_kobj!r}") + log.debug(f"Queued {_kobj!r}") diff --git a/src/koi_net/protocol/envelope.py b/src/koi_net/protocol/envelope.py index 0d77d6a..964932b 100644 --- a/src/koi_net/protocol/envelope.py +++ b/src/koi_net/protocol/envelope.py @@ -1,4 +1,4 @@ -import logging +import structlog from typing import Generic, TypeVar from pydantic import BaseModel, ConfigDict from rid_lib.types import KoiNetNode @@ -7,7 +7,7 @@ from .api_models import RequestModels, ResponseModels -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() T = TypeVar("T", bound=RequestModels | ResponseModels) @@ -28,7 +28,7 @@ def verify_with(self, pub_key: PublicKey): target_node=self.target_node ) - logger.debug(f"Verifying envelope: {unsigned_envelope.model_dump_json(exclude_none=True)}") + log.debug(f"Verifying envelope: {unsigned_envelope.model_dump_json(exclude_none=True)}") pub_key.verify( self.signature, @@ -43,8 +43,8 @@ class UnsignedEnvelope(BaseModel, Generic[T]): target_node: KoiNetNode def sign_with(self, priv_key: PrivateKey) -> SignedEnvelope[T]: - logger.debug(f"Signing envelope: {self.model_dump_json(exclude_none=True)}") - logger.debug(f"Type: [{type(self.payload)}]") + log.debug(f"Signing envelope: {self.model_dump_json(exclude_none=True)}") + log.debug(f"Type: [{type(self.payload)}]") signature = priv_key.sign( self.model_dump_json(exclude_none=True).encode() diff --git a/src/koi_net/protocol/secure.py b/src/koi_net/protocol/secure.py index 683f8be..575089e 100644 --- a/src/koi_net/protocol/secure.py +++ b/src/koi_net/protocol/secure.py @@ -1,4 +1,4 @@ -import logging +import structlog from base64 import b64decode, b64encode from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec @@ -9,7 +9,7 @@ encode_dss_signature ) -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() def der_to_raw_signature(der_signature: bytes, curve=ec.SECP256R1()) -> bytes: @@ -91,9 +91,9 @@ def sign(self, message: bytes) -> str: signature = b64encode(raw_signature_bytes).decode() - logger.debug(f"Signing message with [{self.public_key().to_der()}]") - logger.debug(f"hash: {hashed_message}") - logger.debug(f"signature: {signature}") + log.debug(f"Signing message with [{self.public_key().to_der()}]") + log.debug(f"hash: {hashed_message}") + log.debug(f"signature: {signature}") return signature @@ -144,9 +144,9 @@ def verify(self, signature: str, message: bytes) -> bool: # print() # print(message.decode()) - # logger.debug(f"Verifying message with [{self.to_der()}]") - # logger.debug(f"hash: {hashed_message}") - # logger.debug(f"signature: {signature}") + # log.debug(f"Verifying message with [{self.to_der()}]") + # log.debug(f"hash: {hashed_message}") + # log.debug(f"signature: {signature}") raw_signature_bytes = b64decode(signature) der_signature_bytes = raw_to_der_signature(raw_signature_bytes) diff --git a/src/koi_net/secure.py b/src/koi_net/secure.py index a742dd6..367a888 100644 --- a/src/koi_net/secure.py +++ b/src/koi_net/secure.py @@ -1,4 +1,4 @@ -import logging +import structlog from functools import wraps import cryptography.exceptions @@ 
-20,7 +20,7 @@ ) from .config import NodeConfig -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class Secure: @@ -120,15 +120,15 @@ def envelope_handler(self, func): """ @wraps(func) async def wrapper(req: SignedEnvelope, *args, **kwargs) -> SignedEnvelope | None: - logger.info("Validating envelope") + log.info("Validating envelope") self.validate_envelope(req) - logger.info("Calling endpoint handler") + log.info("Calling endpoint handler") result = await func(req, *args, **kwargs) if result is not None: - logger.info("Creating response envelope") + log.info("Creating response envelope") return self.create_envelope( payload=result, target=req.source_node diff --git a/src/koi_net/server.py b/src/koi_net/server.py index b1cfdf4..af494db 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/server.py @@ -1,4 +1,4 @@ -import logging +import structlog import uvicorn from contextlib import asynccontextmanager from fastapi import FastAPI, APIRouter @@ -31,7 +31,7 @@ from .lifecycle import NodeLifecycle from .config import NodeConfig -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class NodeServer: @@ -104,9 +104,9 @@ def run(self): def protocol_error_handler(self, request, exc: ProtocolError): """Catches `ProtocolError` and returns as `ErrorResponse`.""" - logger.info(f"caught protocol error: {exc}") + log.info(f"caught protocol error: {exc}") resp = ErrorResponse(error=exc.error_type) - logger.info(f"returning error response: {resp}") + log.info(f"returning error response: {resp}") return JSONResponse( status_code=400, content=resp.model_dump(mode="json") @@ -114,7 +114,7 @@ def protocol_error_handler(self, request, exc: ProtocolError): async def broadcast_events(self, req: SignedEnvelope[EventsPayload]): """Handles events broadcast endpoint.""" - logger.info(f"Request to {BROADCAST_EVENTS_PATH}, received {len(req.payload.events)} event(s)") + log.info(f"Request to {BROADCAST_EVENTS_PATH}, received {len(req.payload.events)} event(s)") for event in req.payload.events: self.kobj_queue.put_kobj(event=event, source=req.source_node) @@ -122,7 +122,7 @@ async def poll_events( self, req: SignedEnvelope[PollEvents] ) -> SignedEnvelope[EventsPayload] | ErrorResponse: """Handles poll events endpoint.""" - logger.info(f"Request to {POLL_EVENTS_PATH}") + log.info(f"Request to {POLL_EVENTS_PATH}") events = self.poll_event_buf.flush(req.source_node) return EventsPayload(events=events) diff --git a/uv.lock b/uv.lock index cc4bff2..eb14b30 100644 --- a/uv.lock +++ b/uv.lock @@ -500,7 +500,7 @@ wheels = [ [[package]] name = "koi-net" -version = "1.1.0" +version = "1.2.0b1" source = { editable = "." 
} dependencies = [ { name = "cryptography" }, @@ -513,6 +513,7 @@ dependencies = [ { name = "rich" }, { name = "rid-lib" }, { name = "ruamel-yaml" }, + { name = "structlog" }, { name = "uvicorn" }, ] @@ -546,6 +547,7 @@ requires-dist = [ { name = "sphinx-autoapi", marker = "extra == 'docs'", specifier = ">=3.6.0" }, { name = "sphinx-autodoc-typehints", marker = "extra == 'docs'", specifier = ">=3.0.1" }, { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier = ">=3.0.2" }, + { name = "structlog", specifier = ">=25.4.0" }, { name = "twine", marker = "extra == 'dev'", specifier = ">=6.0" }, { name = "uvicorn", specifier = ">=0.34.2" }, ] @@ -1273,6 +1275,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" }, ] +[[package]] +name = "structlog" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/79/b9/6e672db4fec07349e7a8a8172c1a6ae235c58679ca29c3f86a61b5e59ff3/structlog-25.4.0.tar.gz", hash = "sha256:186cd1b0a8ae762e29417095664adf1d6a31702160a46dacb7796ea82f7409e4", size = 1369138, upload-time = "2025-06-02T08:21:12.971Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/4a/97ee6973e3a73c74c8120d59829c3861ea52210667ec3e7a16045c62b64d/structlog-25.4.0-py3-none-any.whl", hash = "sha256:fe809ff5c27e557d14e613f45ca441aabda051d119ee5a0102aaba6ce40eed2c", size = 68720, upload-time = "2025-06-02T08:21:11.43Z" }, +] + [[package]] name = "tomli" version = "2.2.1" From 864061d2ecf88d170ffc3f833e5b76a9b98b3761 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 9 Oct 2025 11:01:51 -0400 Subject: [PATCH 13/53] refactoring and rearranging, removed behaviors and moved logic into lifecycle, added effector back to handler context --- src/koi_net/__init__.py | 3 +- src/koi_net/behaviors.py | 48 ------------------- src/koi_net/context.py | 6 ++- src/koi_net/core.py | 37 +++++--------- src/koi_net/lifecycle.py | 48 +++++++++++-------- src/koi_net/{log.py => logger.py} | 48 ++++++++++++++----- src/koi_net/models.py | 14 ------ src/koi_net/network/event_queue.py | 10 ++-- .../{ => network}/poll_event_buffer.py | 0 src/koi_net/processor/event_worker.py | 11 ++--- .../{default_handlers.py => handlers.py} | 2 +- src/koi_net/{ => processor}/kobj_worker.py | 11 ++--- .../{knowledge_pipeline.py => pipeline.py} | 9 ++-- src/koi_net/protocol/envelope.py | 1 - src/koi_net/sentry.py | 13 +++++ src/koi_net/server.py | 6 ++- src/koi_net/worker.py | 7 +++ 17 files changed, 128 insertions(+), 146 deletions(-) delete mode 100644 src/koi_net/behaviors.py rename src/koi_net/{log.py => logger.py} (87%) delete mode 100644 src/koi_net/models.py rename src/koi_net/{ => network}/poll_event_buffer.py (100%) rename src/koi_net/processor/{default_handlers.py => handlers.py} (99%) rename src/koi_net/{ => processor}/kobj_worker.py (76%) rename src/koi_net/processor/{knowledge_pipeline.py => pipeline.py} (98%) create mode 100644 src/koi_net/sentry.py diff --git a/src/koi_net/__init__.py b/src/koi_net/__init__.py index 80782ee..b045be0 100644 --- a/src/koi_net/__init__.py +++ b/src/koi_net/__init__.py @@ -1,2 +1,3 @@ from .core import NodeContainer -from . 
import log \ No newline at end of file +from . import logger +from . import sentry \ No newline at end of file diff --git a/src/koi_net/behaviors.py b/src/koi_net/behaviors.py deleted file mode 100644 index a639d4c..0000000 --- a/src/koi_net/behaviors.py +++ /dev/null @@ -1,48 +0,0 @@ -import structlog -from rid_lib.ext import Cache -from rid_lib.types import KoiNetNode -from rid_lib import RIDType -from koi_net.identity import NodeIdentity -from koi_net.network.event_queue import EventQueue -from koi_net.network.request_handler import RequestHandler -from koi_net.network.resolver import NetworkResolver -from koi_net.processor.kobj_queue import KobjQueue -from koi_net.protocol.api_models import ErrorResponse - -log = structlog.stdlib.get_logger() - - -class Behaviors: - def __init__(self, cache: Cache, identity: NodeIdentity, event_queue: EventQueue, resolver: NetworkResolver, request_handler: RequestHandler, kobj_queue: KobjQueue): - self.cache = cache - self.identity = identity - self.event_queue = event_queue - self.resolver = resolver - self.request_handler = request_handler - self.kobj_queue = kobj_queue - - def identify_coordinators(self) -> list[KoiNetNode]: - """Returns node's providing state for `orn:koi-net.node`.""" - return self.resolver.get_state_providers(KoiNetNode) - - def catch_up_with(self, target: KoiNetNode, rid_types: list[RIDType] = []): - """Fetches and processes knowledge objects from target node. - Args: - target: Node to catch up with - rid_types: RID types to fetch from target (all types if list is empty) - """ - log.debug(f"catching up with {target} on {rid_types or 'all types'}") - payload = self.request_handler.fetch_manifests( - node=target, - rid_types=rid_types - ) - if type(payload) == ErrorResponse: - log.debug("failed to reach node") - return - for manifest in payload.manifests: - if manifest.rid == self.identity.rid: - continue - self.kobj_queue.put_kobj( - manifest=manifest, - source=target - ) \ No newline at end of file diff --git a/src/koi_net/context.py b/src/koi_net/context.py index 7f70806..2c57e5a 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -1,5 +1,6 @@ from rid_lib.ext import Cache +from koi_net.effector import Effector from koi_net.network.resolver import NetworkResolver from .config import NodeConfig from .network.graph import NetworkGraph @@ -32,6 +33,7 @@ class HandlerContext: graph: NetworkGraph request_handler: RequestHandler resolver: NetworkResolver + effector: Effector def __init__( self, @@ -43,6 +45,7 @@ def __init__( graph: NetworkGraph, request_handler: RequestHandler, resolver: NetworkResolver, + effector: Effector ): self.identity = identity self.config = config @@ -51,4 +54,5 @@ def __init__( self.kobj_queue = kobj_queue self.graph = graph self.request_handler = request_handler - self.resolver = resolver \ No newline at end of file + self.resolver = resolver + self.effector = effector \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 5ef4f52..14ff750 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,12 +1,11 @@ from dataclasses import dataclass from rid_lib.ext import Cache -from koi_net.behaviors import Behaviors from koi_net.config import NodeConfig from koi_net.context import ActionContext, HandlerContext from koi_net.effector import Effector from koi_net.handshaker import Handshaker from koi_net.identity import NodeIdentity -from koi_net.kobj_worker import KnowledgeProcessingWorker +from koi_net.processor.kobj_worker import 
KnowledgeProcessingWorker from koi_net.lifecycle import NodeLifecycle from koi_net.network.error_handler import ErrorHandler from koi_net.network.event_queue import EventQueue @@ -14,9 +13,9 @@ from koi_net.network.request_handler import RequestHandler from koi_net.network.resolver import NetworkResolver from koi_net.network.response_handler import ResponseHandler -from koi_net.poll_event_buffer import PollEventBuffer +from koi_net.network.poll_event_buffer import PollEventBuffer from koi_net.poller import NodePoller -from koi_net.processor.default_handlers import ( +from koi_net.processor.handlers import ( basic_manifest_handler, basic_network_output_filter, basic_rid_handler, @@ -26,7 +25,7 @@ secure_profile_handler ) from koi_net.processor.event_worker import EventProcessingWorker -from koi_net.processor.knowledge_pipeline import KnowledgePipeline +from koi_net.processor.pipeline import KnowledgePipeline from koi_net.processor.kobj_queue import KobjQueue from koi_net.secure import Secure from koi_net.server import NodeServer @@ -45,8 +44,8 @@ class NodeContainer: request_handler: RequestHandler response_handler: ResponseHandler resolver: NetworkResolver + effector: Effector handler_context: HandlerContext - behaviors: Behaviors pipeline: KnowledgePipeline kobj_worker: KnowledgeProcessingWorker event_worker: EventProcessingWorker @@ -54,7 +53,6 @@ class NodeContainer: lifecycle: NodeLifecycle server: NodeServer poller: NodePoller - class NodeAssembler: poll_event_buf = PollEventBuffer @@ -81,7 +79,6 @@ class NodeAssembler: handler_context = HandlerContext action_context = ActionContext effector = Effector - behaviors = Behaviors pipeline = KnowledgePipeline kobj_worker = KnowledgeProcessingWorker event_worker = EventProcessingWorker @@ -136,16 +133,6 @@ def create(cls) -> NodeContainer: graph=graph, request_handler=request_handler ) - handler_context = cls.handler_context( - identity=identity, - config=config, - cache=cache, - event_queue=event_queue, - kobj_queue=kobj_queue, - graph=graph, - request_handler=request_handler, - resolver=resolver - ) action_context = cls.action_context( identity=identity ) @@ -155,13 +142,15 @@ def create(cls) -> NodeContainer: kobj_queue=kobj_queue, action_context=action_context ) - behaviors = cls.behaviors( - cache=cache, + handler_context = cls.handler_context( identity=identity, + config=config, + cache=cache, event_queue=event_queue, - resolver=resolver, + kobj_queue=kobj_queue, + graph=graph, request_handler=request_handler, - kobj_queue=kobj_queue + resolver=resolver ) pipeline = cls.pipeline( handler_context=handler_context, @@ -192,7 +181,7 @@ def create(cls) -> NodeContainer: event_worker=event_worker, cache=cache, handshaker=handshaker, - behaviors=behaviors + request_handler=request_handler ) server = cls.server( config=config, @@ -221,8 +210,8 @@ def create(cls) -> NodeContainer: request_handler=request_handler, response_handler=response_handler, resolver=resolver, + effector=effector, handler_context=handler_context, - behaviors=behaviors, pipeline=pipeline, kobj_worker=kobj_worker, event_worker=event_worker, diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 323e5c9..674d5f6 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -4,13 +4,13 @@ from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode -from koi_net.behaviors import Behaviors -from koi_net.handshaker import Handshaker -from koi_net.kobj_worker import KnowledgeProcessingWorker -from koi_net.models import END -from 
koi_net.network.event_queue import EventQueue -from koi_net.processor.event_worker import EventProcessingWorker - +from .handshaker import Handshaker +from .network.request_handler import RequestHandler +from .processor.kobj_worker import KnowledgeProcessingWorker +from .network.event_queue import EventQueue +from .processor.event_worker import EventProcessingWorker +from .protocol.api_models import ErrorResponse +from .worker import STOP_WORKER from .config import NodeConfig from .processor.kobj_queue import KobjQueue from .network.graph import NetworkGraph @@ -31,7 +31,7 @@ class NodeLifecycle: event_worker: EventProcessingWorker cache: Cache handshaker: Handshaker - behaviors: Behaviors + request_handler: RequestHandler def __init__( self, @@ -44,7 +44,7 @@ def __init__( event_worker: EventProcessingWorker, cache: Cache, handshaker: Handshaker, - behaviors: Behaviors + request_handler: RequestHandler ): self.config = config self.identity = identity @@ -55,7 +55,7 @@ def __init__( self.event_worker = event_worker self.cache = cache self.handshaker = handshaker - self.behaviors = behaviors + self.request_handler = request_handler @contextmanager def run(self): @@ -107,20 +107,28 @@ def start(self): log.debug("Waiting for kobj queue to empty") self.kobj_queue.q.join() - # TODO: FACTOR OUT BEHAVIOR - coordinators = self.graph.get_neighbors(direction="in", allowed_type=KoiNetNode) - if len(coordinators) == 0 and self.config.koi_net.first_contact.rid: + if len(coordinators) > 0: + for coordinator in coordinators: + payload = self.request_handler.fetch_manifests( + node=coordinator, + rid_types=[KoiNetNode] + ) + if type(payload) is ErrorResponse: + continue + + for manifest in payload.manifests: + self.kobj_queue.put_kobj( + manifest=manifest, + source=coordinator + ) + + elif self.config.koi_net.first_contact.rid: log.debug(f"I don't have any edges with coordinators, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) - - - for coordinator in self.behaviors.identify_coordinators(): - self.behaviors.catch_up_with(coordinator, rid_types=[KoiNetNode]) - def stop(self): """Stops a node. @@ -129,5 +137,5 @@ def stop(self): """ log.info(f"Waiting for kobj queue to empty ({self.kobj_queue.q.unfinished_tasks} tasks remaining)") - self.kobj_queue.q.put(END) - self.event_queue.q.put(END) \ No newline at end of file + self.kobj_queue.q.put(STOP_WORKER) + self.event_queue.q.put(STOP_WORKER) \ No newline at end of file diff --git a/src/koi_net/log.py b/src/koi_net/logger.py similarity index 87% rename from src/koi_net/log.py rename to src/koi_net/logger.py index e063551..37f59a2 100644 --- a/src/koi_net/log.py +++ b/src/koi_net/logger.py @@ -4,13 +4,32 @@ import colorama import structlog import sys +# from sentry_sdk import logger as sentry_logger def my_processor(_, __, event: dict): # print(_, __, event) - event["path"] = event["logger"] + "." + event.pop("func_name") + event["path"] = event["module"] + "." 
+ event["func_name"] return event +# def sentry_processor(_, method, event: dict): +# print(event) +# if method == "critical": +# sentry_logger.fatal( +# event["event"], +# attributes=event +# ) +# elif method == "info": +# sentry_logger.info( +# event["event"], +# attributes=event +# ) +# elif method == "debug": +# sentry_logger.debug( +# event["event"], +# attributes=event +# ) +# return event console_renderer = structlog.dev.ConsoleRenderer( columns=[ @@ -45,16 +64,7 @@ def my_processor(_, __, event: dict): ) ), # Render the event without the key name in bright magenta. - structlog.dev.Column( - "event", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Fore.WHITE, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - width=30 - ), - ), + # Default formatter for all keys not explicitly mentioned. The key is # cyan, the value is green. structlog.dev.Column( @@ -64,7 +74,7 @@ def my_processor(_, __, event: dict): value_style=colorama.Fore.MAGENTA, reset_style=colorama.Style.RESET_ALL, value_repr=str, - postfix=":" + width=30 ), ), # structlog.dev.Column( @@ -74,8 +84,21 @@ def my_processor(_, __, event: dict): # value_style=colorama.Fore.MAGENTA, # reset_style=colorama.Style.RESET_ALL, # value_repr=str, + # prefix="(", + # postfix=")", + # width=15 # ), # ), + structlog.dev.Column( + "event", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Fore.WHITE, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + width=30 + ), + ), structlog.dev.Column( "", structlog.dev.KeyValueColumnFormatter( @@ -119,6 +142,7 @@ def my_processor(_, __, event: dict): ), my_processor, # Render the final event dict as JSON. + # sentry_processor, console_renderer # structlog.processors.JSONRenderer() diff --git a/src/koi_net/models.py b/src/koi_net/models.py deleted file mode 100644 index b5e29b4..0000000 --- a/src/koi_net/models.py +++ /dev/null @@ -1,14 +0,0 @@ -from pydantic import BaseModel -from rid_lib.types import KoiNetNode -from koi_net.protocol.event import Event - -class End: - """Class for a sentinel value by knowledge handlers.""" - pass - -END = End() - - -class QueuedEvent(BaseModel): - event: Event - target: KoiNetNode \ No newline at end of file diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index 0578643..8db2db1 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -2,13 +2,17 @@ from queue import Queue from rid_lib.types import KoiNetNode +from pydantic import BaseModel -from ..models import QueuedEvent from ..protocol.event import Event log = structlog.stdlib.get_logger() +class QueuedEvent(BaseModel): + event: Event + target: KoiNetNode + class EventQueue: """Handles out going network event queues.""" q: Queue[QueuedEvent] @@ -20,9 +24,7 @@ def push_event_to(self, event: Event, target: KoiNetNode): """Pushes event to queue of specified node. Event will be sent to webhook or poll queue depending on the - node type and edge type of the specified node. If `flush` is set - to `True`, the webhook queued will be flushed after pushing the - event. + node type and edge type of the specified node. 
""" self.q.put(QueuedEvent(target=target, event=event)) diff --git a/src/koi_net/poll_event_buffer.py b/src/koi_net/network/poll_event_buffer.py similarity index 100% rename from src/koi_net/poll_event_buffer.py rename to src/koi_net/network/poll_event_buffer.py diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index 1cf405b..ed37cb7 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -7,13 +7,12 @@ from rid_lib.types import KoiNetNode from koi_net.config import NodeConfig -from koi_net.models import END, QueuedEvent -from koi_net.network.event_queue import EventQueue +from koi_net.network.event_queue import EventQueue, QueuedEvent from koi_net.network.request_handler import RequestHandler -from koi_net.poll_event_buffer import PollEventBuffer +from koi_net.network.poll_event_buffer import PollEventBuffer from koi_net.protocol.event import Event from koi_net.protocol.node import NodeProfile, NodeType -from koi_net.worker import ThreadWorker +from koi_net.worker import ThreadWorker, STOP_WORKER log = structlog.stdlib.get_logger() @@ -86,8 +85,8 @@ def run(self): item = self.event_queue.q.get(timeout=self.timeout) try: - if item is END: - log.info("Received 'END' signal, flushing buffer...") + if item is STOP_WORKER: + log.info(f"Received 'STOP_WORKER' signal, flushing buffer...") for target in self.event_buffer.keys(): self.flush_buffer(target, self.event_buffer[target]) return diff --git a/src/koi_net/processor/default_handlers.py b/src/koi_net/processor/handlers.py similarity index 99% rename from src/koi_net/processor/default_handlers.py rename to src/koi_net/processor/handlers.py index 2ff0cfb..0ac899e 100644 --- a/src/koi_net/processor/default_handlers.py +++ b/src/koi_net/processor/handlers.py @@ -9,7 +9,7 @@ from .knowledge_object import KnowledgeObject from ..context import HandlerContext from ..protocol.event import Event, EventType -from ..protocol.edge import EdgeProfile, EdgeStatus, EdgeType, generate_edge_bundle +from ..protocol.edge import EdgeProfile, EdgeStatus, EdgeType from ..protocol.node import NodeProfile log = structlog.stdlib.get_logger() diff --git a/src/koi_net/kobj_worker.py b/src/koi_net/processor/kobj_worker.py similarity index 76% rename from src/koi_net/kobj_worker.py rename to src/koi_net/processor/kobj_worker.py index dac4dee..a5b69eb 100644 --- a/src/koi_net/kobj_worker.py +++ b/src/koi_net/processor/kobj_worker.py @@ -2,10 +2,9 @@ import traceback import structlog -from koi_net.models import END -from koi_net.processor.knowledge_pipeline import KnowledgePipeline -from koi_net.processor.kobj_queue import KobjQueue -from koi_net.worker import ThreadWorker +from .pipeline import KnowledgePipeline +from .kobj_queue import KobjQueue +from koi_net.worker import ThreadWorker, STOP_WORKER log = structlog.stdlib.get_logger() @@ -28,8 +27,8 @@ def run(self): try: item = self.kobj_queue.q.get(timeout=self.timeout) try: - if item is END: - log.info("Received 'END' signal, shutting down...") + if item is STOP_WORKER: + log.info("Received 'STOP_WORKER' signal, shutting down...") return log.info(f"Dequeued {item!r}") diff --git a/src/koi_net/processor/knowledge_pipeline.py b/src/koi_net/processor/pipeline.py similarity index 98% rename from src/koi_net/processor/knowledge_pipeline.py rename to src/koi_net/processor/pipeline.py index 7076591..0e6fd81 100644 --- a/src/koi_net/processor/knowledge_pipeline.py +++ b/src/koi_net/processor/pipeline.py @@ -13,16 +13,13 @@ StopChain ) from 
.knowledge_object import KnowledgeObject - -from typing import TYPE_CHECKING -if TYPE_CHECKING: - from ..context import HandlerContext +from ..context import HandlerContext log = structlog.stdlib.get_logger() class KnowledgePipeline: - handler_context: "HandlerContext" + handler_context: HandlerContext cache: Cache identity: NodeIdentity request_handler: RequestHandler @@ -32,7 +29,7 @@ class KnowledgePipeline: def __init__( self, - handler_context: "HandlerContext", + handler_context: HandlerContext, cache: Cache, request_handler: RequestHandler, event_queue: EventQueue, diff --git a/src/koi_net/protocol/envelope.py b/src/koi_net/protocol/envelope.py index 964932b..cfe8141 100644 --- a/src/koi_net/protocol/envelope.py +++ b/src/koi_net/protocol/envelope.py @@ -6,7 +6,6 @@ from .secure import PrivateKey, PublicKey from .api_models import RequestModels, ResponseModels - log = structlog.stdlib.get_logger() diff --git a/src/koi_net/sentry.py b/src/koi_net/sentry.py new file mode 100644 index 0000000..822a14d --- /dev/null +++ b/src/koi_net/sentry.py @@ -0,0 +1,13 @@ +# import sentry_sdk +# from sentry_sdk.integrations.logging import LoggingIntegration + +# sentry_sdk.init( +# dsn="https://7bbafef3c7dbd652506db3cb2aca9f98@o4510149352357888.ingest.us.sentry.io/4510149355765760", +# # Add data like request headers and IP for users, +# # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info +# send_default_pii=True, +# enable_logs=True, +# integrations=[ +# LoggingIntegration(sentry_logs_level=None) +# ] +# ) \ No newline at end of file diff --git a/src/koi_net/server.py b/src/koi_net/server.py index af494db..4d8f28a 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/server.py @@ -4,7 +4,7 @@ from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse -from koi_net.poll_event_buffer import PollEventBuffer +from koi_net.network.poll_event_buffer import PollEventBuffer from .network.response_handler import ResponseHandler from .processor.kobj_queue import KobjQueue from .protocol.api_models import ( @@ -99,7 +99,9 @@ def run(self): uvicorn.run( app=self.app, host=self.config.server.host, - port=self.config.server.port + port=self.config.server.port, + log_config=None, + access_log=False ) def protocol_error_handler(self, request, exc: ProtocolError): diff --git a/src/koi_net/worker.py b/src/koi_net/worker.py index 20394d0..fa3b7b0 100644 --- a/src/koi_net/worker.py +++ b/src/koi_net/worker.py @@ -1,5 +1,12 @@ import threading + +class End: + """Class for a sentinel value by knowledge handlers.""" + pass + +STOP_WORKER = End() + class ThreadWorker: thread: threading.Thread From 774d7b1eb180efbe7e321ce7fc74871688d38629 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 14 Oct 2025 16:40:12 -0400 Subject: [PATCH 14/53] factored out action context for now, finally added support for "limit" in poll events request, added ApiModels as a discriminated union using type consts in the request/payload models, added a model map linking API paths (aka methods) to request and response models (both raw and enveloped versions) -- refactored request handler accordingly, major overhaul moving a lot of logic from server -> response_handler: wraps with envelope validation into a single method -- entrypoints (poller, server) should be largely interfaces not contain much logic themselves, --- src/koi_net/context.py | 12 --- src/koi_net/core.py | 27 +++---- src/koi_net/effector.py | 18 ++++- src/koi_net/{ => interfaces}/worker.py | 0 src/koi_net/lifecycle.py | 2 
+- src/koi_net/network/poll_event_buffer.py | 13 +++- src/koi_net/network/request_handler.py | 24 +++--- src/koi_net/network/response_handler.py | 73 +++++++++++++++++- src/koi_net/poller.py | 4 +- src/koi_net/processor/event_worker.py | 2 +- src/koi_net/processor/kobj_worker.py | 2 +- src/koi_net/protocol/api_models.py | 7 +- src/koi_net/protocol/model_map.py | 61 +++++++++++++++ src/koi_net/server.py | 94 +++++------------------- 14 files changed, 208 insertions(+), 131 deletions(-) rename src/koi_net/{ => interfaces}/worker.py (100%) create mode 100644 src/koi_net/protocol/model_map.py diff --git a/src/koi_net/context.py b/src/koi_net/context.py index 2c57e5a..a1713df 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -10,18 +10,6 @@ from .processor.kobj_queue import KobjQueue -class ActionContext: - """Provides action handlers access to other subsystems.""" - - identity: NodeIdentity - - def __init__( - self, - identity: NodeIdentity, - ): - self.identity = identity - - class HandlerContext: """Provides knowledge handlers access to other subsystems.""" diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 14ff750..571a6df 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from rid_lib.ext import Cache from koi_net.config import NodeConfig -from koi_net.context import ActionContext, HandlerContext +from koi_net.context import HandlerContext from koi_net.effector import Effector from koi_net.handshaker import Handshaker from koi_net.identity import NodeIdentity @@ -77,7 +77,6 @@ class NodeAssembler: forget_edge_on_node_deletion ] handler_context = HandlerContext - action_context = ActionContext effector = Effector pipeline = KnowledgePipeline kobj_worker = KnowledgeProcessingWorker @@ -89,7 +88,7 @@ class NodeAssembler: @classmethod def create(cls) -> NodeContainer: - poll_event_buffer = cls.poll_event_buf() + poll_event_buf = cls.poll_event_buf() kobj_queue = cls.kobj_queue() event_queue = cls.event_queue() config = cls.config.load_from_yaml() @@ -124,7 +123,10 @@ def create(cls) -> NodeContainer: error_handler=error_handler ) response_handler = cls.response_handler( - cache=cache + cache=cache, + kobj_queue=kobj_queue, + poll_event_buf=poll_event_buf, + secure=secure ) resolver = cls.resolver( config=config, @@ -133,14 +135,11 @@ def create(cls) -> NodeContainer: graph=graph, request_handler=request_handler ) - action_context = cls.action_context( - identity=identity - ) effector = cls.effector( cache=cache, resolver=resolver, kobj_queue=kobj_queue, - action_context=action_context + identity=identity ) handler_context = cls.handler_context( identity=identity, @@ -150,7 +149,8 @@ def create(cls) -> NodeContainer: kobj_queue=kobj_queue, graph=graph, request_handler=request_handler, - resolver=resolver + resolver=resolver, + effector=effector ) pipeline = cls.pipeline( handler_context=handler_context, @@ -169,7 +169,7 @@ def create(cls) -> NodeContainer: cache=cache, event_queue=event_queue, request_handler=request_handler, - poll_event_buf=poll_event_buffer + poll_event_buf=poll_event_buf ) lifecycle = cls.lifecycle( config=config, @@ -186,10 +186,7 @@ def create(cls) -> NodeContainer: server = cls.server( config=config, lifecycle=lifecycle, - secure=secure, - kobj_queue=kobj_queue, - response_handler=response_handler, - poll_event_buf=poll_event_buffer + response_handler=response_handler ) poller = cls.poller( kobj_queue=kobj_queue, @@ -199,7 +196,7 @@ def create(cls) -> NodeContainer: ) return 
NodeContainer( - poll_event_buf=poll_event_buffer, + poll_event_buf=poll_event_buf, kobj_queue=kobj_queue, event_queue=event_queue, config=config, diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index c142724..f5871a1 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -6,11 +6,23 @@ from rid_lib.types import KoiNetNode from .network.resolver import NetworkResolver from .processor.kobj_queue import KobjQueue -from .context import ActionContext +from .identity import NodeIdentity log = structlog.stdlib.get_logger() +class ActionContext: + """Provides action handlers access to other subsystems.""" + + identity: NodeIdentity + + def __init__( + self, + identity: NodeIdentity, + ): + self.identity = identity + + class BundleSource(StrEnum): CACHE = "CACHE" ACTION = "ACTION" @@ -35,12 +47,12 @@ def __init__( cache: Cache, resolver: NetworkResolver, kobj_queue: KobjQueue, - action_context: ActionContext + identity: NodeIdentity ): self.cache = cache self.resolver = resolver self.kobj_queue = kobj_queue - self.action_context = action_context + self.action_context = ActionContext(identity) self._action_table = self.__class__._action_table.copy() @classmethod diff --git a/src/koi_net/worker.py b/src/koi_net/interfaces/worker.py similarity index 100% rename from src/koi_net/worker.py rename to src/koi_net/interfaces/worker.py diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 674d5f6..771ae8e 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -10,7 +10,7 @@ from .network.event_queue import EventQueue from .processor.event_worker import EventProcessingWorker from .protocol.api_models import ErrorResponse -from .worker import STOP_WORKER +from .interfaces.worker import STOP_WORKER from .config import NodeConfig from .processor.kobj_queue import KobjQueue from .network.graph import NetworkGraph diff --git a/src/koi_net/network/poll_event_buffer.py b/src/koi_net/network/poll_event_buffer.py index cd83997..d26d2a6 100644 --- a/src/koi_net/network/poll_event_buffer.py +++ b/src/koi_net/network/poll_event_buffer.py @@ -13,5 +13,14 @@ def put(self, node: KoiNetNode, event: Event): event_buf = self.buffers.setdefault(node, []) event_buf.append(event) - def flush(self, node: KoiNetNode): - return self.buffers.pop(node, []) \ No newline at end of file + def flush(self, node: KoiNetNode, limit: int = 0): + event_buf = self.buffers.get(node, []) + + if limit and len(event_buf) > limit: + to_return = event_buf[:limit] + self.buffers[node] = event_buf[limit:] + else: + to_return = event_buf.copy() + self.buffers[node] = [] + + return to_return \ No newline at end of file diff --git a/src/koi_net/network/request_handler.py b/src/koi_net/network/request_handler.py index 6042680..6f4a834 100644 --- a/src/koi_net/network/request_handler.py +++ b/src/koi_net/network/request_handler.py @@ -4,6 +4,8 @@ from rid_lib.ext import Cache from rid_lib.types.koi_net_node import KoiNetNode +from koi_net.protocol.model_map import API_MODEL_MAP + from ..identity import NodeIdentity from ..protocol.api_models import ( RidsPayload, @@ -115,7 +117,10 @@ def make_request( ) try: - result = httpx.post(url, data=signed_envelope.model_dump_json(exclude_none=True)) + result = httpx.post( + url, + data=signed_envelope.model_dump_json(exclude_none=True) + ) except httpx.ConnectError as err: log.debug("Failed to connect") self.error_handler.handle_connection_error(node) @@ -126,20 +131,11 @@ def make_request( self.error_handler.handle_protocol_error(resp.error, node) 
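The "limit" support this patch adds to PollEventBuffer.flush amounts to: drain at most `limit` events for a node and leave the rest buffered, with `limit=0` (the default) meaning drain everything. Below is a minimal standalone sketch of that draining behavior, separate from the patch code itself; the class and node names are simplified stand-ins, not the real koi_net types.

    from collections import defaultdict

    class Buffer:
        """Stand-in for PollEventBuffer: a per-node FIFO of pending events."""

        def __init__(self):
            self.buffers: dict[str, list] = defaultdict(list)

        def put(self, node: str, event) -> None:
            self.buffers[node].append(event)

        def flush(self, node: str, limit: int = 0) -> list:
            buf = self.buffers.get(node, [])
            if limit and len(buf) > limit:
                # Return only the first `limit` events, keep the remainder queued.
                out, self.buffers[node] = buf[:limit], buf[limit:]
            else:
                # A limit of 0 (or one larger than the buffer) drains everything.
                out, self.buffers[node] = buf.copy(), []
            return out

    buf = Buffer()
    for i in range(5):
        buf.put("node-a", i)
    assert buf.flush("node-a", limit=2) == [0, 1]
    assert buf.flush("node-a") == [2, 3, 4]

This is the behavior the poll_events_handler relies on when it reads the `limit` field from the PollEvents request, letting a polling node cap how many events it receives per round trip.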
return resp - if path == BROADCAST_EVENTS_PATH: - return None - elif path == POLL_EVENTS_PATH: - EnvelopeModel = SignedEnvelope[EventsPayload] - elif path == FETCH_RIDS_PATH: - EnvelopeModel = SignedEnvelope[RidsPayload] - elif path == FETCH_MANIFESTS_PATH: - EnvelopeModel = SignedEnvelope[ManifestsPayload] - elif path == FETCH_BUNDLES_PATH: - EnvelopeModel = SignedEnvelope[BundlesPayload] - else: - raise UnknownPathError(f"Unknown path '{path}'") + resp_env_model = API_MODEL_MAP[path].response_envelope + if resp_env_model is None: + return - resp_envelope = EnvelopeModel.model_validate_json(result.text) + resp_envelope = resp_env_model.model_validate_json(result.text) self.secure.validate_envelope(resp_envelope) return resp_envelope.payload diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index c1722fd..9a7fbe7 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -4,7 +4,17 @@ from rid_lib.ext import Manifest, Cache from rid_lib.ext.bundle import Bundle +from koi_net.network.poll_event_buffer import PollEventBuffer +from koi_net.processor.kobj_queue import KobjQueue +from koi_net.protocol.consts import BROADCAST_EVENTS_PATH, FETCH_BUNDLES_PATH, FETCH_MANIFESTS_PATH, FETCH_RIDS_PATH, POLL_EVENTS_PATH +from koi_net.protocol.envelope import SignedEnvelope +from koi_net.protocol.model_map import API_MODEL_MAP +from koi_net.secure import Secure + from ..protocol.api_models import ( + ApiModels, + EventsPayload, + PollEvents, RidsPayload, ManifestsPayload, BundlesPayload, @@ -20,21 +30,72 @@ class ResponseHandler: """Handles generating responses to requests from other KOI nodes.""" cache: Cache + kobj_queue: KobjQueue + poll_event_buf: PollEventBuffer def __init__( self, - cache: Cache, + cache: Cache, + kobj_queue: KobjQueue, + poll_event_buf: PollEventBuffer, + secure: Secure ): self.cache = cache + self.kobj_queue = kobj_queue + self.poll_event_buf = poll_event_buf + self.secure = secure + + def handle_response(self, path: str, req: SignedEnvelope): + self.secure.validate_envelope(req) + + response_map = { + BROADCAST_EVENTS_PATH: self.broadcast_events_handler, + POLL_EVENTS_PATH: self.poll_events_handler, + FETCH_RIDS_PATH: self.fetch_rids_handler, + FETCH_MANIFESTS_PATH: self.fetch_manifests_handler, + FETCH_BUNDLES_PATH: self.fetch_bundles_handler + } + + response = response_map[path](req.payload, req.source_node) + + if response is None: + return + + return self.secure.create_envelope( + payload=response, + target=req.source_node + ) - def fetch_rids(self, req: FetchRids, source: KoiNetNode) -> RidsPayload: + def broadcast_events_handler(self, req: EventsPayload, source: KoiNetNode): + log.info(f"Request to broadcast events, received {len(req.events)} event(s)") + + for event in req.events: + self.kobj_queue.put_kobj(event=event, source=source) + + def poll_events_handler( + self, + req: PollEvents, + source: KoiNetNode + ) -> EventsPayload: + log.info(f"Request to poll events") + events = self.poll_event_buf.flush(source, limit=req.limit) + return EventsPayload(events=events) + + def fetch_rids_handler( + self, + req: FetchRids, + source: KoiNetNode + ) -> RidsPayload: """Returns response to fetch RIDs request.""" log.info(f"Request to fetch rids, allowed types {req.rid_types}") rids = self.cache.list_rids(req.rid_types) return RidsPayload(rids=rids) - def fetch_manifests(self, req: FetchManifests, source: KoiNetNode) -> ManifestsPayload: + def fetch_manifests_handler(self, + req: 
FetchManifests, + source: KoiNetNode + ) -> ManifestsPayload: """Returns response to fetch manifests request.""" log.info(f"Request to fetch manifests, allowed types {req.rid_types}, rids {req.rids}") @@ -50,7 +111,11 @@ def fetch_manifests(self, req: FetchManifests, source: KoiNetNode) -> ManifestsP return ManifestsPayload(manifests=manifests, not_found=not_found) - def fetch_bundles(self, req: FetchBundles, source: KoiNetNode) -> BundlesPayload: + def fetch_bundles_handler( + self, + req: FetchBundles, + source: KoiNetNode + ) -> BundlesPayload: """Returns response to fetch bundles request.""" log.info(f"Request to fetch bundles, requested rids {req.rids}") diff --git a/src/koi_net/poller.py b/src/koi_net/poller.py index 258cf8d..a5633bf 100644 --- a/src/koi_net/poller.py +++ b/src/koi_net/poller.py @@ -18,10 +18,10 @@ class NodePoller: def __init__( self, - kobj_queue: KobjQueue, + config: NodeConfig, lifecycle: NodeLifecycle, + kobj_queue: KobjQueue, resolver: NetworkResolver, - config: NodeConfig ): self.kobj_queue = kobj_queue self.lifecycle = lifecycle diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index ed37cb7..01d636e 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -12,7 +12,7 @@ from koi_net.network.poll_event_buffer import PollEventBuffer from koi_net.protocol.event import Event from koi_net.protocol.node import NodeProfile, NodeType -from koi_net.worker import ThreadWorker, STOP_WORKER +from koi_net.interfaces.worker import ThreadWorker, STOP_WORKER log = structlog.stdlib.get_logger() diff --git a/src/koi_net/processor/kobj_worker.py b/src/koi_net/processor/kobj_worker.py index a5b69eb..8f2963d 100644 --- a/src/koi_net/processor/kobj_worker.py +++ b/src/koi_net/processor/kobj_worker.py @@ -4,7 +4,7 @@ from .pipeline import KnowledgePipeline from .kobj_queue import KobjQueue -from koi_net.worker import ThreadWorker, STOP_WORKER +from koi_net.interfaces.worker import ThreadWorker, STOP_WORKER log = structlog.stdlib.get_logger() diff --git a/src/koi_net/protocol/api_models.py b/src/koi_net/protocol/api_models.py index ae11f72..2189f48 100644 --- a/src/koi_net/protocol/api_models.py +++ b/src/koi_net/protocol/api_models.py @@ -1,6 +1,6 @@ """Pydantic models for request and response objects in the KOI-net API.""" -from typing import Literal +from typing import Annotated, Literal from pydantic import BaseModel, Field from rid_lib import RID, RIDType from rid_lib.ext import Bundle, Manifest @@ -60,4 +60,7 @@ class ErrorResponse(BaseModel): type RequestModels = EventsPayload | PollEvents | FetchRids | FetchManifests | FetchBundles type ResponseModels = RidsPayload | ManifestsPayload | BundlesPayload | EventsPayload | ErrorResponse -type ApiModels = RequestModels | ResponseModels \ No newline at end of file +type ApiModels = Annotated[ + RequestModels | ResponseModels, + Field(discriminator="type") +] \ No newline at end of file diff --git a/src/koi_net/protocol/model_map.py b/src/koi_net/protocol/model_map.py new file mode 100644 index 0000000..79ed6c2 --- /dev/null +++ b/src/koi_net/protocol/model_map.py @@ -0,0 +1,61 @@ +from typing import NamedTuple +from pydantic import BaseModel +from .envelope import SignedEnvelope +from .consts import ( + BROADCAST_EVENTS_PATH, + POLL_EVENTS_PATH, + FETCH_BUNDLES_PATH, + FETCH_MANIFESTS_PATH, + FETCH_RIDS_PATH +) +from .api_models import ( + EventsPayload, + PollEvents, + FetchBundles, + BundlesPayload, + FetchManifests, + ManifestsPayload, + 
FetchRids, + RidsPayload +) + + +class Models(NamedTuple): + request: type[BaseModel] + response: type[BaseModel] | None + request_envelope: type[SignedEnvelope] + response_envelope: type[SignedEnvelope] | None + + +API_MODEL_MAP: dict[str, Models] = { + BROADCAST_EVENTS_PATH: Models( + request=EventsPayload, + response=None, + request_envelope=SignedEnvelope[EventsPayload], + response_envelope=None + ), + POLL_EVENTS_PATH: Models( + request=PollEvents, + response=EventsPayload, + request_envelope=SignedEnvelope[PollEvents], + response_envelope=SignedEnvelope[EventsPayload] + ), + FETCH_BUNDLES_PATH: Models( + request=FetchBundles, + response=BundlesPayload, + request_envelope=SignedEnvelope[FetchBundles], + response_envelope=SignedEnvelope[BundlesPayload] + ), + FETCH_MANIFESTS_PATH: Models( + request=FetchManifests, + response=ManifestsPayload, + request_envelope=SignedEnvelope[FetchManifests], + response_envelope=SignedEnvelope[ManifestsPayload] + ), + FETCH_RIDS_PATH: Models( + request=FetchRids, + response=RidsPayload, + request_envelope=SignedEnvelope[FetchRids], + response_envelope=SignedEnvelope[RidsPayload] + ) +} \ No newline at end of file diff --git a/src/koi_net/server.py b/src/koi_net/server.py index 4d8f28a..f405b99 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/server.py @@ -4,30 +4,10 @@ from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse -from koi_net.network.poll_event_buffer import PollEventBuffer from .network.response_handler import ResponseHandler -from .processor.kobj_queue import KobjQueue -from .protocol.api_models import ( - PollEvents, - FetchRids, - FetchManifests, - FetchBundles, - EventsPayload, - RidsPayload, - ManifestsPayload, - BundlesPayload, - ErrorResponse -) +from .protocol.model_map import API_MODEL_MAP +from .protocol.api_models import ErrorResponse from .protocol.errors import ProtocolError -from .protocol.envelope import SignedEnvelope -from .protocol.consts import ( - BROADCAST_EVENTS_PATH, - POLL_EVENTS_PATH, - FETCH_RIDS_PATH, - FETCH_MANIFESTS_PATH, - FETCH_BUNDLES_PATH -) -from .secure import Secure from .lifecycle import NodeLifecycle from .config import NodeConfig @@ -38,9 +18,6 @@ class NodeServer: """Manages FastAPI server and event handling for full nodes.""" config: NodeConfig lifecycle: NodeLifecycle - secure: Secure - kobj_queue: KobjQueue - poll_event_buf: PollEventBuffer response_handler: ResponseHandler app: FastAPI router: APIRouter @@ -49,16 +26,10 @@ def __init__( self, config: NodeConfig, lifecycle: NodeLifecycle, - secure: Secure, - kobj_queue: KobjQueue, - poll_event_buf: PollEventBuffer, - response_handler: ResponseHandler + response_handler: ResponseHandler, ): self.config = config self.lifecycle = lifecycle - self.secure = secure - self.kobj_queue = kobj_queue - self.poll_event_buf = poll_event_buf self.response_handler = response_handler self._build_app() @@ -75,23 +46,30 @@ async def lifespan(*args, **kwargs): version="1.0.0" ) - self.router = APIRouter(prefix="/koi-net") self.app.add_exception_handler(ProtocolError, self.protocol_error_handler) - def _add_endpoint(path, func): + self.router = APIRouter(prefix="/koi-net") + + for path, models in API_MODEL_MAP.items(): + def create_endpoint(path: str): + async def endpoint(req): + return self.response_handler.handle_response(path, req) + + # programmatically setting type hint annotations for FastAPI's model validation + endpoint.__annotations__ = { + "req": models.request_envelope, + "return": models.response_envelope + } + + return 
endpoint + self.router.add_api_route( path=path, - endpoint=self.secure.envelope_handler(func), + endpoint=create_endpoint(path), methods=["POST"], response_model_exclude_none=True ) - _add_endpoint(BROADCAST_EVENTS_PATH, self.broadcast_events) - _add_endpoint(POLL_EVENTS_PATH, self.poll_events) - _add_endpoint(FETCH_RIDS_PATH, self.fetch_rids) - _add_endpoint(FETCH_MANIFESTS_PATH, self.fetch_manifests) - _add_endpoint(FETCH_BUNDLES_PATH, self.fetch_bundles) - self.app.include_router(self.router) def run(self): @@ -112,36 +90,4 @@ def protocol_error_handler(self, request, exc: ProtocolError): return JSONResponse( status_code=400, content=resp.model_dump(mode="json") - ) - - async def broadcast_events(self, req: SignedEnvelope[EventsPayload]): - """Handles events broadcast endpoint.""" - log.info(f"Request to {BROADCAST_EVENTS_PATH}, received {len(req.payload.events)} event(s)") - for event in req.payload.events: - self.kobj_queue.put_kobj(event=event, source=req.source_node) - - async def poll_events( - self, req: SignedEnvelope[PollEvents] - ) -> SignedEnvelope[EventsPayload] | ErrorResponse: - """Handles poll events endpoint.""" - log.info(f"Request to {POLL_EVENTS_PATH}") - events = self.poll_event_buf.flush(req.source_node) - return EventsPayload(events=events) - - async def fetch_rids( - self, req: SignedEnvelope[FetchRids] - ) -> SignedEnvelope[RidsPayload] | ErrorResponse: - """Handles fetch RIDs endpoint.""" - return self.response_handler.fetch_rids(req.payload, req.source_node) - - async def fetch_manifests( - self, req: SignedEnvelope[FetchManifests] - ) -> SignedEnvelope[ManifestsPayload] | ErrorResponse: - """Handles fetch manifests endpoint.""" - return self.response_handler.fetch_manifests(req.payload, req.source_node) - - async def fetch_bundles( - self, req: SignedEnvelope[FetchBundles] - ) -> SignedEnvelope[BundlesPayload] | ErrorResponse: - """Handles fetch bundles endpoint.""" - return self.response_handler.fetch_bundles(req.payload, req.source_node) + ) \ No newline at end of file From 405a7db58a6b71bfdb3e870b0e227de82def7500 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 15 Oct 2025 17:30:35 -0400 Subject: [PATCH 15/53] working on a better dependency injection system, moved around some default values and dependencies --- src/koi_net/assembler.py | 190 ++++++++++++++++++++++++++ src/koi_net/core.py | 32 ++++- src/koi_net/interfaces/entrypoint.py | 5 + src/koi_net/poller.py | 4 +- src/koi_net/processor/event_worker.py | 11 +- src/koi_net/processor/kobj_worker.py | 6 +- src/koi_net/server.py | 24 ++-- 7 files changed, 247 insertions(+), 25 deletions(-) create mode 100644 src/koi_net/assembler.py create mode 100644 src/koi_net/interfaces/entrypoint.py diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py new file mode 100644 index 0000000..2e1ce5a --- /dev/null +++ b/src/koi_net/assembler.py @@ -0,0 +1,190 @@ +import inspect +from typing import Protocol +from dataclasses import make_dataclass + +import structlog + +from rid_lib.ext import Cache +from koi_net.config import NodeConfig +from koi_net.context import HandlerContext +from koi_net.core import NodeContainer +from koi_net.effector import Effector +from koi_net.handshaker import Handshaker +from koi_net.identity import NodeIdentity +from koi_net.interfaces.entrypoint import EntryPoint +from koi_net.processor.kobj_worker import KnowledgeProcessingWorker +from koi_net.lifecycle import NodeLifecycle +from koi_net.network.error_handler import ErrorHandler +from koi_net.network.event_queue import EventQueue 
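The dependency-injection idea behind this new assembler module is worth spelling out: each component is declared as a class attribute whose value is a factory (usually just the component class), and the assembler inspects each factory's constructor signature and resolves its parameters by name from components built earlier. The following is a deliberately simplified sketch of that mechanism with stand-in component names, not the real implementation (which also derives the build order via a metaclass and packs the result into a generated dataclass).

    import inspect

    class MiniAssembler:
        @classmethod
        def build(cls, order: list[str]) -> dict:
            components: dict[str, object] = {}
            for name in order:
                factory = getattr(cls, name)
                # Constructor parameters are matched by name against
                # components that were built earlier in the order.
                params = inspect.signature(factory).parameters
                kwargs = {p: components[p] for p in params}
                components[name] = factory(**kwargs)
            return components

    class Config:
        def __init__(self):
            self.url = "http://127.0.0.1:8000"  # stand-in value

    class Client:
        def __init__(self, config: Config):
            self.config = config

    class DemoNode(MiniAssembler):
        config = Config
        client = Client

    built = DemoNode.build(["config", "client"])
    assert built["client"].config is built["config"]

Because parameters are resolved purely by name, a subclass can swap out any single component (as the example nodes do with the config class and the knowledge handler list) without touching the wiring code.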
+from koi_net.network.graph import NetworkGraph +from koi_net.network.request_handler import RequestHandler +from koi_net.network.resolver import NetworkResolver +from koi_net.network.response_handler import ResponseHandler +from koi_net.network.poll_event_buffer import PollEventBuffer +from koi_net.poller import NodePoller +from koi_net.processor.handlers import ( + basic_manifest_handler, + basic_network_output_filter, + basic_rid_handler, + node_contact_handler, + edge_negotiation_handler, + forget_edge_on_node_deletion, + secure_profile_handler +) +from koi_net.processor.event_worker import EventProcessingWorker +from koi_net.processor.pipeline import KnowledgePipeline +from koi_net.processor.kobj_queue import KobjQueue +from koi_net.secure import Secure +from koi_net.server import NodeServer + +log = structlog.stdlib.get_logger() + + +class RecipeBuilder(type): + def __new__(cls, name: str, bases: tuple, dct: dict[str]): + cls = super().__new__(cls, name, bases, dct) + + def safe_update(d1: dict[str], d2: dict[str]): + for k, v in d2.items(): + # excludes private and reserved attributes + if not k.startswith("_"): + d1[k] = v + + if "_build_order" not in dct: + components = {} + for base in bases: + # adds components from base classes + safe_update(components, vars(base)) + + # adds components from this class + safe_update(components, dct) + + # recipe list constructed from names of non-None components + cls._build_order = [ + name for name, _type in components.items() + if _type is not None + ] + + return cls + +# class NodeAssembler(metaclass=NodeReciper): +# # recipe: list[str] +# # components: type + +# @classmethod + +class NodeContainer(Protocol): + entrypoint = EntryPoint + +class NodeAssembler: + blueprint = None + + def __init__(self): + return self.build(self.blueprint) + + @staticmethod + def build(blueprint: RecipeBuilder) -> NodeContainer: + components = {} + for comp_name in blueprint._build_order: + log.info(f"Assembling component '{comp_name}'") + + comp_factory = getattr(blueprint, comp_name, None) + + if comp_factory is None: + log.error("Couldn't find factory for component") + return + + sig = inspect.signature(comp_factory) + + required_comps = [] + for name, param in sig.parameters.items(): + required_comps.append((name, param.annotation)) + + log.info(f"Component requires {[d[0] for d in required_comps]}") + + kwargs = {} + for req_comp_name, req_comp_type in required_comps: + comp = components.get(req_comp_name) + if not comp: + log.error(f"failed to resolve dependency {req_comp_name}") + + kwargs[req_comp_name] = comp + + components[comp_name] = comp_factory(**kwargs) + + NodeContainer = make_dataclass( + cls_name="NodeContainer", + fields=[ + (name, type(component)) + for name, component + in components.items() + ], + frozen=True + ) + + return NodeContainer(**components) + +class NodeBlueprint(metaclass=RecipeBuilder): + pass + + +def make_config() -> NodeConfig: + return NodeConfig.load_from_yaml() + +def make_cache(config: NodeConfig) -> Cache: + return Cache(directory_path=config.koi_net.cache_directory_path) + + +class BaseNodeBlueprint(NodeBlueprint): + config = make_config + kobj_queue = KobjQueue + event_queue = EventQueue + poll_event_buf = PollEventBuffer + knowledge_handlers = lambda: [ + basic_rid_handler, + basic_manifest_handler, + secure_profile_handler, + edge_negotiation_handler, + node_contact_handler, + basic_network_output_filter, + forget_edge_on_node_deletion + ] + cache = make_cache + identity = NodeIdentity + graph = NetworkGraph + 
secure = Secure + handshaker = Handshaker + error_handler = ErrorHandler + request_handler = RequestHandler + response_handler = ResponseHandler + resolver = NetworkResolver + effector = Effector + handler_context = HandlerContext + pipeline = KnowledgePipeline + kobj_worker = KnowledgeProcessingWorker + event_worker = EventProcessingWorker + lifecycle = NodeLifecycle + server = NodeServer + +class FullNodeBlueprint(BaseNodeBlueprint): + entrypoint = NodeServer + +class PartialNodeBlueprint(BaseNodeBlueprint): + entrypoint = NodePoller + +class FullNode(NodeAssembler): + blueprint = FullNodeBlueprint + + + +if __name__ == "__main__": + print("Full Node:") + for n, name in enumerate(FullNodeBlueprint._build_order): + print(f"{n}. {name}") + + print("Partial Node:") + for n, name in enumerate(PartialNodeBlueprint._build_order): + print(f"{n}. {name}") + + partial_node = NodeAssembler.build(PartialNodeBlueprint) + full_node = FullNode() \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 571a6df..e0ee906 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -5,6 +5,7 @@ from koi_net.effector import Effector from koi_net.handshaker import Handshaker from koi_net.identity import NodeIdentity +from koi_net.processor.handler import KnowledgeHandler from koi_net.processor.kobj_worker import KnowledgeProcessingWorker from koi_net.lifecycle import NodeLifecycle from koi_net.network.error_handler import ErrorHandler @@ -41,6 +42,8 @@ class NodeContainer: identity: NodeIdentity graph: NetworkGraph secure: Secure + handshaker: Handshaker + knowledge_handlers: list[KnowledgeHandler] request_handler: RequestHandler response_handler: ResponseHandler resolver: NetworkResolver @@ -52,7 +55,7 @@ class NodeContainer: error_handler: ErrorHandler lifecycle: NodeLifecycle server: NodeServer - poller: NodePoller + # poller: NodePoller class NodeAssembler: poll_event_buf = PollEventBuffer @@ -88,16 +91,21 @@ class NodeAssembler: @classmethod def create(cls) -> NodeContainer: - poll_event_buf = cls.poll_event_buf() + # Layer 0 + config = cls.config.load_from_yaml() kobj_queue = cls.kobj_queue() event_queue = cls.event_queue() - config = cls.config.load_from_yaml() + poll_event_buf = cls.poll_event_buf() + + # Layer 1 cache = cls.cache( directory_path=config.koi_net.cache_directory_path ) identity = cls.identity( config=config ) + + # Layer 2 graph = cls.graph( cache=cache, identity=identity @@ -112,10 +120,14 @@ def create(cls) -> NodeContainer: identity=identity, event_queue=event_queue ) + + # Layer 3 error_handler = cls.error_handler( kobj_queue=kobj_queue, handshaker=handshaker ) + + # Layer 4 request_handler = cls.request_handler( cache=cache, identity=identity, @@ -128,6 +140,8 @@ def create(cls) -> NodeContainer: poll_event_buf=poll_event_buf, secure=secure ) + + # Layer 5 resolver = cls.resolver( config=config, cache=cache, @@ -135,12 +149,16 @@ def create(cls) -> NodeContainer: graph=graph, request_handler=request_handler ) + + # Layer 6 effector = cls.effector( cache=cache, resolver=resolver, kobj_queue=kobj_queue, identity=identity ) + + # Layer 7 handler_context = cls.handler_context( identity=identity, config=config, @@ -152,6 +170,8 @@ def create(cls) -> NodeContainer: resolver=resolver, effector=effector ) + + # Layer 8 pipeline = cls.pipeline( handler_context=handler_context, cache=cache, @@ -160,6 +180,8 @@ def create(cls) -> NodeContainer: graph=graph, knowledge_handlers=cls.knowledge_handlers ) + + # Layer 9 kobj_worker = cls.kobj_worker( 
kobj_queue=kobj_queue, pipeline=pipeline @@ -171,6 +193,8 @@ def create(cls) -> NodeContainer: request_handler=request_handler, poll_event_buf=poll_event_buf ) + + # Layer 10 lifecycle = cls.lifecycle( config=config, identity=identity, @@ -183,6 +207,8 @@ def create(cls) -> NodeContainer: handshaker=handshaker, request_handler=request_handler ) + + # Layer 11 server = cls.server( config=config, lifecycle=lifecycle, diff --git a/src/koi_net/interfaces/entrypoint.py b/src/koi_net/interfaces/entrypoint.py new file mode 100644 index 0000000..ab46e97 --- /dev/null +++ b/src/koi_net/interfaces/entrypoint.py @@ -0,0 +1,5 @@ +from typing import Protocol + + +class EntryPoint(Protocol): + def run(self): ... \ No newline at end of file diff --git a/src/koi_net/poller.py b/src/koi_net/poller.py index a5633bf..544411a 100644 --- a/src/koi_net/poller.py +++ b/src/koi_net/poller.py @@ -1,6 +1,8 @@ import time import structlog + +from koi_net.interfaces.entrypoint import EntryPoint from .processor.kobj_queue import KobjQueue from .lifecycle import NodeLifecycle from .network.resolver import NetworkResolver @@ -9,7 +11,7 @@ log = structlog.stdlib.get_logger() -class NodePoller: +class NodePoller(EntryPoint): """Manages polling based event loop for partial nodes.""" kobj_queue: KobjQueue lifecycle: NodeLifecycle diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index 01d636e..3e25010 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -27,10 +27,7 @@ def __init__( request_handler: RequestHandler, config: NodeConfig, cache: Cache, - poll_event_buf: PollEventBuffer, - queue_timeout: float = 0.1, - max_buf_len: int = 5, - max_wait_time: float = 1.0 + poll_event_buf: PollEventBuffer ): self.event_queue = event_queue self.request_handler = request_handler @@ -39,9 +36,9 @@ def __init__( self.cache = cache self.poll_event_buf = poll_event_buf - self.timeout = queue_timeout - self.max_buf_len = max_buf_len - self.max_wait_time = max_wait_time + self.timeout: float = 0.1 + self.max_buf_len: int = 5 + self.max_wait_time: float = 1.0 self.event_buffer = dict() self.buffer_times = dict() diff --git a/src/koi_net/processor/kobj_worker.py b/src/koi_net/processor/kobj_worker.py index 8f2963d..5b35387 100644 --- a/src/koi_net/processor/kobj_worker.py +++ b/src/koi_net/processor/kobj_worker.py @@ -13,12 +13,12 @@ class KnowledgeProcessingWorker(ThreadWorker): def __init__( self, kobj_queue: KobjQueue, - pipeline: KnowledgePipeline, - timeout: float = 0.1 + pipeline: KnowledgePipeline ): self.kobj_queue = kobj_queue self.pipeline = pipeline - self.timeout = timeout + self.timeout: float = 0.1 + super().__init__() def run(self): diff --git a/src/koi_net/server.py b/src/koi_net/server.py index f405b99..fef170d 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/server.py @@ -4,6 +4,8 @@ from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse +from koi_net.interfaces.entrypoint import EntryPoint + from .network.response_handler import ResponseHandler from .protocol.model_map import API_MODEL_MAP from .protocol.api_models import ErrorResponse @@ -14,7 +16,7 @@ log = structlog.stdlib.get_logger() -class NodeServer: +class NodeServer(EntryPoint): """Manages FastAPI server and event handling for full nodes.""" config: NodeConfig lifecycle: NodeLifecycle @@ -71,16 +73,6 @@ async def endpoint(req): ) self.app.include_router(self.router) - - def run(self): - """Starts FastAPI server and event handler.""" - uvicorn.run( - 
app=self.app, - host=self.config.server.host, - port=self.config.server.port, - log_config=None, - access_log=False - ) def protocol_error_handler(self, request, exc: ProtocolError): """Catches `ProtocolError` and returns as `ErrorResponse`.""" @@ -90,4 +82,14 @@ def protocol_error_handler(self, request, exc: ProtocolError): return JSONResponse( status_code=400, content=resp.model_dump(mode="json") + ) + + def run(self): + """Starts FastAPI server and event handler.""" + uvicorn.run( + app=self.app, + host=self.config.server.host, + port=self.config.server.port, + log_config=None, + access_log=False ) \ No newline at end of file From 5aed08c7a6c1722ecac091d6091a21d266804c4e Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 16 Oct 2025 10:45:36 -0400 Subject: [PATCH 16/53] figured out clean unified design for assembler and blueprints, works like a regular class is expected to --- src/koi_net/assembler.py | 43 ++++++++++++++-------------------------- 1 file changed, 15 insertions(+), 28 deletions(-) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 2e1ce5a..69fa0d6 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -40,7 +40,7 @@ log = structlog.stdlib.get_logger() -class RecipeBuilder(type): +class BuildOrderer(type): def __new__(cls, name: str, bases: tuple, dct: dict[str]): cls = super().__new__(cls, name, bases, dct) @@ -67,28 +67,21 @@ def safe_update(d1: dict[str], d2: dict[str]): return cls -# class NodeAssembler(metaclass=NodeReciper): -# # recipe: list[str] -# # components: type - -# @classmethod class NodeContainer(Protocol): entrypoint = EntryPoint -class NodeAssembler: - blueprint = None - - def __init__(self): - return self.build(self.blueprint) +class NodeAssembler(metaclass=BuildOrderer): + def __new__(self) -> NodeContainer: + return self._build() - @staticmethod - def build(blueprint: RecipeBuilder) -> NodeContainer: + @classmethod + def _build(cls) -> NodeContainer: components = {} - for comp_name in blueprint._build_order: + for comp_name in cls._build_order: log.info(f"Assembling component '{comp_name}'") - comp_factory = getattr(blueprint, comp_name, None) + comp_factory = getattr(cls, comp_name, None) if comp_factory is None: log.error("Couldn't find factory for component") @@ -124,8 +117,6 @@ def build(blueprint: RecipeBuilder) -> NodeContainer: return NodeContainer(**components) -class NodeBlueprint(metaclass=RecipeBuilder): - pass def make_config() -> NodeConfig: @@ -135,7 +126,7 @@ def make_cache(config: NodeConfig) -> Cache: return Cache(directory_path=config.koi_net.cache_directory_path) -class BaseNodeBlueprint(NodeBlueprint): +class BaseNode(NodeAssembler): config = make_config kobj_queue = KobjQueue event_queue = EventQueue @@ -166,25 +157,21 @@ class BaseNodeBlueprint(NodeBlueprint): lifecycle = NodeLifecycle server = NodeServer -class FullNodeBlueprint(BaseNodeBlueprint): +class FullNode(BaseNode): entrypoint = NodeServer -class PartialNodeBlueprint(BaseNodeBlueprint): +class PartialNode(BaseNode): entrypoint = NodePoller - -class FullNode(NodeAssembler): - blueprint = FullNodeBlueprint - - + if __name__ == "__main__": print("Full Node:") - for n, name in enumerate(FullNodeBlueprint._build_order): + for n, name in enumerate(FullNode._build_order): print(f"{n}. {name}") print("Partial Node:") - for n, name in enumerate(PartialNodeBlueprint._build_order): + for n, name in enumerate(PartialNode._build_order): print(f"{n}. 
{name}") - partial_node = NodeAssembler.build(PartialNodeBlueprint) + partial_node = PartialNode() full_node = FullNode() \ No newline at end of file From ab4eb0cbe0baff2cdcc8c4bfeb314b9ef0d9fdcf Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 16 Oct 2025 16:18:20 -0400 Subject: [PATCH 17/53] refined node assembler, parses through all base classes in reverse order to construct _build_order, moved base, partial, and full node defintions back to core.py. marking for 1.2.0b2 release --- pyproject.toml | 2 +- src/koi_net/__init__.py | 1 - src/koi_net/assembler.py | 139 ++++---------------- src/koi_net/core.py | 269 ++++++++------------------------------- 4 files changed, 74 insertions(+), 337 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2c2152e..b4653a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" -version = "1.2.0b1" +version = "1.2.0b2" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} diff --git a/src/koi_net/__init__.py b/src/koi_net/__init__.py index b045be0..45dea67 100644 --- a/src/koi_net/__init__.py +++ b/src/koi_net/__init__.py @@ -1,3 +1,2 @@ -from .core import NodeContainer from . import logger from . import sentry \ No newline at end of file diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 69fa0d6..0cab2e3 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -4,61 +4,25 @@ import structlog -from rid_lib.ext import Cache -from koi_net.config import NodeConfig -from koi_net.context import HandlerContext -from koi_net.core import NodeContainer -from koi_net.effector import Effector -from koi_net.handshaker import Handshaker -from koi_net.identity import NodeIdentity -from koi_net.interfaces.entrypoint import EntryPoint -from koi_net.processor.kobj_worker import KnowledgeProcessingWorker -from koi_net.lifecycle import NodeLifecycle -from koi_net.network.error_handler import ErrorHandler -from koi_net.network.event_queue import EventQueue -from koi_net.network.graph import NetworkGraph -from koi_net.network.request_handler import RequestHandler -from koi_net.network.resolver import NetworkResolver -from koi_net.network.response_handler import ResponseHandler -from koi_net.network.poll_event_buffer import PollEventBuffer -from koi_net.poller import NodePoller -from koi_net.processor.handlers import ( - basic_manifest_handler, - basic_network_output_filter, - basic_rid_handler, - node_contact_handler, - edge_negotiation_handler, - forget_edge_on_node_deletion, - secure_profile_handler -) -from koi_net.processor.event_worker import EventProcessingWorker -from koi_net.processor.pipeline import KnowledgePipeline -from koi_net.processor.kobj_queue import KobjQueue -from koi_net.secure import Secure -from koi_net.server import NodeServer +from .interfaces.entrypoint import EntryPoint log = structlog.stdlib.get_logger() class BuildOrderer(type): def __new__(cls, name: str, bases: tuple, dct: dict[str]): + """Sets `cls._build_order` from component order in class definition.""" cls = super().__new__(cls, name, bases, dct) - def safe_update(d1: dict[str], d2: dict[str]): - for k, v in d2.items(): - # excludes private and reserved attributes - if not k.startswith("_"): - d1[k] = v - if "_build_order" not in dct: components = {} - for base in bases: - # adds components from base classes - safe_update(components, vars(base)) - - # adds components from this class - 
safe_update(components, dct) - + # adds components from base classes (including cls) + for base in reversed(inspect.getmro(cls)[:-1]): + for k, v in vars(base).items(): + # excludes built in and private attributes + if not k.startswith("_"): + components[k] = v + # recipe list constructed from names of non-None components cls._build_order = [ name for name, _type in components.items() @@ -67,7 +31,6 @@ def safe_update(d1: dict[str], d2: dict[str]): return cls - class NodeContainer(Protocol): entrypoint = EntryPoint @@ -78,14 +41,11 @@ def __new__(self) -> NodeContainer: @classmethod def _build(cls) -> NodeContainer: components = {} - for comp_name in cls._build_order: - log.info(f"Assembling component '{comp_name}'") - + for comp_name in cls._build_order: comp_factory = getattr(cls, comp_name, None) if comp_factory is None: - log.error("Couldn't find factory for component") - return + raise Exception(f"Couldn't find factory for component '{comp_name}'") sig = inspect.signature(comp_factory) @@ -93,17 +53,21 @@ def _build(cls) -> NodeContainer: for name, param in sig.parameters.items(): required_comps.append((name, param.annotation)) - log.info(f"Component requires {[d[0] for d in required_comps]}") + if len(required_comps) == 0: + s = comp_name + else: + s = f"{comp_name} -> {', '.join([name for name, _type in required_comps])}" - kwargs = {} + print(s.replace("graph", "_graph"), end=";\n") + + dependencies = {} for req_comp_name, req_comp_type in required_comps: - comp = components.get(req_comp_name) - if not comp: - log.error(f"failed to resolve dependency {req_comp_name}") + if req_comp_name not in components: + raise Exception(f"Couldn't find required component '{req_comp_name}'") - kwargs[req_comp_name] = comp + dependencies[req_comp_name] = components[req_comp_name] - components[comp_name] = comp_factory(**kwargs) + components[comp_name] = comp_factory(**dependencies) NodeContainer = make_dataclass( cls_name="NodeContainer", @@ -116,62 +80,3 @@ def _build(cls) -> NodeContainer: ) return NodeContainer(**components) - - - -def make_config() -> NodeConfig: - return NodeConfig.load_from_yaml() - -def make_cache(config: NodeConfig) -> Cache: - return Cache(directory_path=config.koi_net.cache_directory_path) - - -class BaseNode(NodeAssembler): - config = make_config - kobj_queue = KobjQueue - event_queue = EventQueue - poll_event_buf = PollEventBuffer - knowledge_handlers = lambda: [ - basic_rid_handler, - basic_manifest_handler, - secure_profile_handler, - edge_negotiation_handler, - node_contact_handler, - basic_network_output_filter, - forget_edge_on_node_deletion - ] - cache = make_cache - identity = NodeIdentity - graph = NetworkGraph - secure = Secure - handshaker = Handshaker - error_handler = ErrorHandler - request_handler = RequestHandler - response_handler = ResponseHandler - resolver = NetworkResolver - effector = Effector - handler_context = HandlerContext - pipeline = KnowledgePipeline - kobj_worker = KnowledgeProcessingWorker - event_worker = EventProcessingWorker - lifecycle = NodeLifecycle - server = NodeServer - -class FullNode(BaseNode): - entrypoint = NodeServer - -class PartialNode(BaseNode): - entrypoint = NodePoller - - -if __name__ == "__main__": - print("Full Node:") - for n, name in enumerate(FullNode._build_order): - print(f"{n}. {name}") - - print("Partial Node:") - for n, name in enumerate(PartialNode._build_order): - print(f"{n}. 
{name}") - - partial_node = PartialNode() - full_node = FullNode() \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index e0ee906..aa976f5 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,22 +1,21 @@ -from dataclasses import dataclass from rid_lib.ext import Cache -from koi_net.config import NodeConfig -from koi_net.context import HandlerContext -from koi_net.effector import Effector -from koi_net.handshaker import Handshaker -from koi_net.identity import NodeIdentity -from koi_net.processor.handler import KnowledgeHandler -from koi_net.processor.kobj_worker import KnowledgeProcessingWorker -from koi_net.lifecycle import NodeLifecycle -from koi_net.network.error_handler import ErrorHandler -from koi_net.network.event_queue import EventQueue -from koi_net.network.graph import NetworkGraph -from koi_net.network.request_handler import RequestHandler -from koi_net.network.resolver import NetworkResolver -from koi_net.network.response_handler import ResponseHandler -from koi_net.network.poll_event_buffer import PollEventBuffer -from koi_net.poller import NodePoller -from koi_net.processor.handlers import ( +from .assembler import NodeAssembler +from .config.base import BaseConfig +from .context import HandlerContext +from .effector import Effector +from .handshaker import Handshaker +from .identity import NodeIdentity +from .processor.kobj_worker import KnowledgeProcessingWorker +from .lifecycle import NodeLifecycle +from .network.error_handler import ErrorHandler +from .network.event_queue import EventQueue +from .network.graph import NetworkGraph +from .network.request_handler import RequestHandler +from .network.resolver import NetworkResolver +from .network.response_handler import ResponseHandler +from .network.poll_event_buffer import PollEventBuffer +from .poller import NodePoller +from .processor.handlers import ( basic_manifest_handler, basic_network_output_filter, basic_rid_handler, @@ -25,52 +24,28 @@ forget_edge_on_node_deletion, secure_profile_handler ) -from koi_net.processor.event_worker import EventProcessingWorker -from koi_net.processor.pipeline import KnowledgePipeline -from koi_net.processor.kobj_queue import KobjQueue -from koi_net.secure import Secure -from koi_net.server import NodeServer +from .processor.event_worker import EventProcessingWorker +from .processor.pipeline import KnowledgePipeline +from .processor.kobj_queue import KobjQueue +from .secure import Secure +from .server import NodeServer -@dataclass -class NodeContainer: - poll_event_buf: PollEventBuffer - kobj_queue: KobjQueue - event_queue: EventQueue - config: NodeConfig - cache: Cache - identity: NodeIdentity - graph: NetworkGraph - secure: Secure - handshaker: Handshaker - knowledge_handlers: list[KnowledgeHandler] - request_handler: RequestHandler - response_handler: ResponseHandler - resolver: NetworkResolver - effector: Effector - handler_context: HandlerContext - pipeline: KnowledgePipeline - kobj_worker: KnowledgeProcessingWorker - event_worker: EventProcessingWorker - error_handler: ErrorHandler - lifecycle: NodeLifecycle - server: NodeServer - # poller: NodePoller +# factory functions for components with non standard initializiation -class NodeAssembler: - poll_event_buf = PollEventBuffer +def make_config() -> BaseConfig: + return BaseConfig.load_from_yaml() + +def make_cache(config: BaseConfig) -> Cache: + return Cache(directory_path=config.koi_net.cache_directory_path) + + +class BaseNode(NodeAssembler): + config = lambda: None kobj_queue = 
KobjQueue event_queue = EventQueue - config = NodeConfig - cache = Cache - identity = NodeIdentity - graph = NetworkGraph - secure = Secure - handshaker = Handshaker - request_handler = RequestHandler - response_handler = ResponseHandler - resolver = NetworkResolver - knowledge_handlers = [ + poll_event_buf = PollEventBuffer + knowledge_handlers = lambda: [ basic_rid_handler, basic_manifest_handler, secure_profile_handler, @@ -79,167 +54,25 @@ class NodeAssembler: basic_network_output_filter, forget_edge_on_node_deletion ] - handler_context = HandlerContext + cache = make_cache + identity = NodeIdentity + graph = NetworkGraph + secure = Secure + handshaker = Handshaker + error_handler = ErrorHandler + request_handler = RequestHandler + response_handler = ResponseHandler + resolver = NetworkResolver effector = Effector + handler_context = HandlerContext pipeline = KnowledgePipeline kobj_worker = KnowledgeProcessingWorker event_worker = EventProcessingWorker - error_handler = ErrorHandler lifecycle = NodeLifecycle - server = NodeServer - poller = NodePoller - - @classmethod - def create(cls) -> NodeContainer: - # Layer 0 - config = cls.config.load_from_yaml() - kobj_queue = cls.kobj_queue() - event_queue = cls.event_queue() - poll_event_buf = cls.poll_event_buf() - - # Layer 1 - cache = cls.cache( - directory_path=config.koi_net.cache_directory_path - ) - identity = cls.identity( - config=config - ) - - # Layer 2 - graph = cls.graph( - cache=cache, - identity=identity - ) - secure = cls.secure( - identity=identity, - cache=cache, - config=config - ) - handshaker = cls.handshaker( - cache=cache, - identity=identity, - event_queue=event_queue - ) - - # Layer 3 - error_handler = cls.error_handler( - kobj_queue=kobj_queue, - handshaker=handshaker - ) - - # Layer 4 - request_handler = cls.request_handler( - cache=cache, - identity=identity, - secure=secure, - error_handler=error_handler - ) - response_handler = cls.response_handler( - cache=cache, - kobj_queue=kobj_queue, - poll_event_buf=poll_event_buf, - secure=secure - ) - - # Layer 5 - resolver = cls.resolver( - config=config, - cache=cache, - identity=identity, - graph=graph, - request_handler=request_handler - ) - - # Layer 6 - effector = cls.effector( - cache=cache, - resolver=resolver, - kobj_queue=kobj_queue, - identity=identity - ) - - # Layer 7 - handler_context = cls.handler_context( - identity=identity, - config=config, - cache=cache, - event_queue=event_queue, - kobj_queue=kobj_queue, - graph=graph, - request_handler=request_handler, - resolver=resolver, - effector=effector - ) - - # Layer 8 - pipeline = cls.pipeline( - handler_context=handler_context, - cache=cache, - request_handler=request_handler, - event_queue=event_queue, - graph=graph, - knowledge_handlers=cls.knowledge_handlers - ) - - # Layer 9 - kobj_worker = cls.kobj_worker( - kobj_queue=kobj_queue, - pipeline=pipeline - ) - event_worker = cls.event_worker( - config=config, - cache=cache, - event_queue=event_queue, - request_handler=request_handler, - poll_event_buf=poll_event_buf - ) - - # Layer 10 - lifecycle = cls.lifecycle( - config=config, - identity=identity, - graph=graph, - kobj_queue=kobj_queue, - kobj_worker=kobj_worker, - event_queue=event_queue, - event_worker=event_worker, - cache=cache, - handshaker=handshaker, - request_handler=request_handler - ) - - # Layer 11 - server = cls.server( - config=config, - lifecycle=lifecycle, - response_handler=response_handler - ) - poller = cls.poller( - kobj_queue=kobj_queue, - lifecycle=lifecycle, - resolver=resolver, - 
config=config - ) - - return NodeContainer( - poll_event_buf=poll_event_buf, - kobj_queue=kobj_queue, - event_queue=event_queue, - config=config, - cache=cache, - identity=identity, - graph=graph, - secure=secure, - request_handler=request_handler, - response_handler=response_handler, - resolver=resolver, - effector=effector, - handler_context=handler_context, - pipeline=pipeline, - kobj_worker=kobj_worker, - event_worker=event_worker, - error_handler=error_handler, - lifecycle=lifecycle, - server=server, - poller=poller - ) \ No newline at end of file + + +class FullNode(BaseNode): + entrypoint = NodeServer + +class PartialNode(BaseNode): + entrypoint = NodePoller \ No newline at end of file From 7edec905799a0a97db3b552934cc92fc9d9dc758 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 21 Oct 2025 15:05:18 -0400 Subject: [PATCH 18/53] attempt to refactor config system. separate subsystem ConfigLoader handles reading and writing to YAML, config components now separated by full/partial node, generating on missing logic moved to model validators, added support for non factories to dependency injection framework -- detects non callables, and Pydantic BaseModels and treats them as is --- examples/coordinator.py | 51 ++++---- examples/partial.py | 27 ++--- src/koi_net/__init__.py | 1 - src/koi_net/assembler.py | 25 +++- src/koi_net/cli/models.py | 4 +- src/koi_net/config.py | 161 -------------------------- src/koi_net/config/core.py | 71 ++++++++++++ src/koi_net/config/full_node.py | 31 +++++ src/koi_net/config/loader.py | 51 ++++++++ src/koi_net/config/partial_node.py | 18 +++ src/koi_net/context.py | 2 +- src/koi_net/core.py | 14 +-- src/koi_net/entrypoint.py | 11 ++ src/koi_net/identity.py | 2 +- src/koi_net/interfaces/entrypoint.py | 4 +- src/koi_net/lifecycle.py | 2 +- src/koi_net/network/resolver.py | 2 +- src/koi_net/poller.py | 4 +- src/koi_net/processor/event_worker.py | 2 +- src/koi_net/protocol/node.py | 6 +- src/koi_net/protocol/secure.py | 6 + src/koi_net/secure.py | 2 +- src/koi_net/sentry.py | 13 --- src/koi_net/server.py | 5 +- 24 files changed, 266 insertions(+), 249 deletions(-) delete mode 100644 src/koi_net/config.py create mode 100644 src/koi_net/config/core.py create mode 100644 src/koi_net/config/full_node.py create mode 100644 src/koi_net/config/loader.py create mode 100644 src/koi_net/config/partial_node.py create mode 100644 src/koi_net/entrypoint.py delete mode 100644 src/koi_net/sentry.py diff --git a/examples/coordinator.py b/examples/coordinator.py index 369ab8e..f66cce3 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -1,10 +1,14 @@ import logging from rich.logging import RichHandler -from pydantic import Field from rid_lib.types import KoiNetNode, KoiNetEdge -from koi_net.config import NodeConfig, KoiNetConfig, ServerConfig -from koi_net.core import NodeAssembler -from koi_net.protocol.node import NodeProfile, NodeProvides, NodeType +from koi_net.config.full_node import ( + FullNodeConfig, + ServerConfig, + KoiNetConfig, + NodeProfile, + NodeProvides +) +from koi_net.core import FullNode from koi_net.context import HandlerContext from koi_net.processor.handler import HandlerType, KnowledgeHandler from koi_net.processor.knowledge_object import KnowledgeObject @@ -22,22 +26,17 @@ logger = logging.getLogger(__name__) -class CoordinatorConfig(NodeConfig): - server: ServerConfig = Field(default_factory=lambda: - ServerConfig(port=8080) - ) - koi_net: KoiNetConfig = Field(default_factory = lambda: - KoiNetConfig( - node_name="coordinator", - 
node_profile=NodeProfile( - node_type=NodeType.FULL, - provides=NodeProvides( - event=[KoiNetNode, KoiNetEdge], - state=[KoiNetNode, KoiNetEdge] - ) - ), - rid_types_of_interest=[KoiNetNode, KoiNetEdge] - ) +class CoordinatorConfig(FullNodeConfig): + server: ServerConfig = ServerConfig(port=8080) + koi_net: KoiNetConfig = KoiNetConfig( + node_name="coordinator", + node_profile=NodeProfile( + provides=NodeProvides( + event=[KoiNetNode, KoiNetEdge], + state=[KoiNetNode, KoiNetEdge] + ) + ), + rid_types_of_interest=[KoiNetNode, KoiNetEdge] ) @KnowledgeHandler.create( @@ -70,14 +69,10 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): ctx.kobj_queue.put_kobj(rid=edge_bundle.rid, event_type=EventType.FORGET) ctx.kobj_queue.put_kobj(bundle=edge_bundle) -class CoordinatorNodeAssembler(NodeAssembler): +class CoordinatorNode(FullNode): config = CoordinatorConfig - knowledge_handlers = [ - *NodeAssembler.knowledge_handlers, - handshake_handler - ] - + knowledge_handlers = FullNode.knowledge_handlers + [handshake_handler] if __name__ == "__main__": - node = CoordinatorNodeAssembler.create() - node.server.run() \ No newline at end of file + node = CoordinatorNode() + node.entrypoint.run() \ No newline at end of file diff --git a/examples/partial.py b/examples/partial.py index d8158b9..f0f66fe 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -1,9 +1,8 @@ import logging -from pydantic import Field from rich.logging import RichHandler -from koi_net.core import NodeAssembler -from koi_net.protocol.node import NodeProfile, NodeType -from koi_net.config import NodeConfig, KoiNetConfig +from koi_net.config.partial_node import PartialNodeConfig, KoiNetConfig, NodeProfile +from koi_net.core import PartialNode + logging.basicConfig( level=logging.INFO, @@ -16,20 +15,16 @@ logger = logging.getLogger(__name__) -class PartialNodeConfig(NodeConfig): - koi_net: KoiNetConfig = Field(default_factory = lambda: - KoiNetConfig( - node_name="partial", - node_profile=NodeProfile( - node_type=NodeType.PARTIAL - ) - ) +class MyPartialNodeConfig(PartialNodeConfig): + koi_net: KoiNetConfig = KoiNetConfig( + node_name="partial", + node_profile=NodeProfile() ) -class PartialNodeAssembler(NodeAssembler): - config = PartialNodeConfig +class MyPartialNode(PartialNode): + config_cls = MyPartialNodeConfig if __name__ == "__main__": - node = PartialNodeAssembler.create() - node.poller.run() \ No newline at end of file + node = MyPartialNode() + # node.entrypoint.run() \ No newline at end of file diff --git a/src/koi_net/__init__.py b/src/koi_net/__init__.py index 45dea67..906398a 100644 --- a/src/koi_net/__init__.py +++ b/src/koi_net/__init__.py @@ -1,2 +1 @@ from . import logger -from . 
import sentry \ No newline at end of file diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 0cab2e3..ffcd723 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -2,6 +2,7 @@ from typing import Protocol from dataclasses import make_dataclass +from pydantic import BaseModel import structlog from .interfaces.entrypoint import EntryPoint @@ -41,13 +42,25 @@ def __new__(self) -> NodeContainer: @classmethod def _build(cls) -> NodeContainer: components = {} - for comp_name in cls._build_order: - comp_factory = getattr(cls, comp_name, None) + for comp_name in cls._build_order: + comp = getattr(cls, comp_name, None) - if comp_factory is None: + if comp is None: raise Exception(f"Couldn't find factory for component '{comp_name}'") - sig = inspect.signature(comp_factory) + print(comp_name) + + if not callable(comp): + print(f"Treating {comp_name} as a literal") + components[comp_name] = comp + continue + + if issubclass(comp, BaseModel): + print(f"Treating {comp_name} as a pydantic model") + components[comp_name] = comp + continue + + sig = inspect.signature(comp) required_comps = [] for name, param in sig.parameters.items(): @@ -58,7 +71,7 @@ def _build(cls) -> NodeContainer: else: s = f"{comp_name} -> {', '.join([name for name, _type in required_comps])}" - print(s.replace("graph", "_graph"), end=";\n") + # print(s.replace("graph", "_graph"), end=";\n") dependencies = {} for req_comp_name, req_comp_type in required_comps: @@ -67,7 +80,7 @@ def _build(cls) -> NodeContainer: dependencies[req_comp_name] = components[req_comp_name] - components[comp_name] = comp_factory(**dependencies) + components[comp_name] = comp(**dependencies) NodeContainer = make_dataclass( cls_name="NodeContainer", diff --git a/src/koi_net/cli/models.py b/src/koi_net/cli/models.py index dfb4093..2cdae5b 100644 --- a/src/koi_net/cli/models.py +++ b/src/koi_net/cli/models.py @@ -1,9 +1,9 @@ -from pydantic import BaseModel, Field, PrivateAttr +from pydantic import BaseModel, PrivateAttr from ruamel.yaml import YAML class KoiNetworkConfig(BaseModel): - nodes: dict[str, str] = Field(default_factory=dict) + nodes: dict[str, str] = {} _file_path: str = PrivateAttr(default="koi-net-config.yaml") @classmethod diff --git a/src/koi_net/config.py b/src/koi_net/config.py deleted file mode 100644 index 7065dc6..0000000 --- a/src/koi_net/config.py +++ /dev/null @@ -1,161 +0,0 @@ -import os -from rid_lib import RIDType -from ruamel.yaml import YAML -from pydantic import BaseModel, Field, PrivateAttr -from dotenv import load_dotenv -from rid_lib.ext.utils import sha256_hash -from rid_lib.types import KoiNetNode -from .protocol.secure import PrivateKey -from .protocol.node import NodeProfile, NodeType - - -class ServerConfig(BaseModel): - """Config for the node server (full node only).""" - - host: str = "127.0.0.1" - port: int = 8000 - path: str | None = "/koi-net" - - @property - def url(self) -> str: - return f"http://{self.host}:{self.port}{self.path or ''}" - -class NodeContact(BaseModel): - rid: KoiNetNode | None = None - url: str | None = None - -class KoiNetConfig(BaseModel): - """Config for KOI-net.""" - - node_name: str - node_rid: KoiNetNode | None = None - node_profile: NodeProfile - - rid_types_of_interest: list[RIDType] = Field( - default_factory=lambda: [KoiNetNode]) - - cache_directory_path: str = ".rid_cache" - event_queues_path: str = "event_queues.json" - private_key_pem_path: str = "priv_key.pem" - polling_interval: int = 5 - - first_contact: NodeContact = 
Field(default_factory=NodeContact) - - _priv_key: PrivateKey | None = PrivateAttr(default=None) - -class EnvConfig(BaseModel): - """Config for environment variables. - - Values set in the config are the variables names, and are loaded - from the environment at runtime. For example, if the config YAML - sets `priv_key_password: PRIV_KEY_PASSWORD` accessing - `priv_key_password` would retrieve the value of `PRIV_KEY_PASSWORD` - from the environment. - """ - - priv_key_password: str | None = "PRIV_KEY_PASSWORD" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - load_dotenv() - - def __getattribute__(self, name): - value = super().__getattribute__(name) - if name in type(self).model_fields: - env_val = os.getenv(value) - if env_val is None: - raise ValueError(f"Required environment variable {value} not set") - return env_val - return value - -class NodeConfig(BaseModel): - """Base configuration class for all nodes. - - Designed to be extensible for custom node implementations. Classes - inheriting from `NodeConfig` may add additional config groups. - """ - - server: ServerConfig = Field(default_factory=ServerConfig) - koi_net: KoiNetConfig - env: EnvConfig = Field(default_factory=EnvConfig) - - _file_path: str = PrivateAttr(default="config.yaml") - _file_content: str | None = PrivateAttr(default=None) - - @classmethod - def load_from_yaml( - cls, - file_path: str = "config.yaml", - generate_missing: bool = True - ): - """Loads config state from YAML file. - - Defaults to `config.yaml`. If `generate_missing` is set to - `True`, a private key and RID will be generated if not already - present in the config. - """ - yaml = YAML() - - try: - with open(file_path, "r") as f: - file_content = f.read() - config_data = yaml.load(file_content) - config = cls.model_validate(config_data) - config._file_content = file_content - - except FileNotFoundError: - # empty_fields = {} - # for name, field in cls.model_fields.items(): - - # if field.default is None or field.default_factory is None: - # print(empty_fields) - config = cls() - - - config._file_path = file_path - - if generate_missing: - if not config.koi_net.node_rid: - priv_key = PrivateKey.generate() - pub_key = priv_key.public_key() - - config.koi_net.node_rid = KoiNetNode( - config.koi_net.node_name, - sha256_hash(pub_key.to_der()) - ) - - with open(config.koi_net.private_key_pem_path, "w") as f: - f.write( - priv_key.to_pem(config.env.priv_key_password) - ) - - config.koi_net.node_profile.public_key = pub_key.to_der() - - if config.koi_net.node_profile.node_type == NodeType.FULL: - config.koi_net.node_profile.base_url = ( - config.koi_net.node_profile.base_url or config.server.url - ) - - config.save_to_yaml() - - return config - - def save_to_yaml(self): - """Saves config state to YAML file. - - File path is set by `load_from_yaml` class method. 
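One part of the old config that is easy to misread, and that is kept unchanged in the new config/core.py later in this patch, is the EnvConfig indirection: the value stored on a field is the name of an environment variable, and attribute access returns that variable's value at runtime, raising if it is unset. A small self-contained sketch of the same lookup pattern follows; the class name and the example value are stand-ins for illustration only.

    import os
    from pydantic import BaseModel

    class EnvIndirect(BaseModel):
        # The field value names the environment variable to read,
        # it is not the secret itself.
        priv_key_password: str = "PRIV_KEY_PASSWORD"

        def __getattribute__(self, name):
            value = super().__getattribute__(name)
            if name in type(self).model_fields:
                env_val = os.getenv(value)
                if env_val is None:
                    raise ValueError(f"Required environment variable {value} not set")
                return env_val
            return value

    os.environ["PRIV_KEY_PASSWORD"] = "example-password"  # stand-in for the demo
    cfg = EnvIndirect()
    assert cfg.priv_key_password == "example-password"

So `config.env.priv_key_password` never returns the literal string from the YAML; it always resolves through the environment, which keeps the private key PEM password out of the config file itself.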
- """ - - yaml = YAML() - - with open(self._file_path, "w") as f: - try: - config_data = self.model_dump(mode="json") - yaml.dump(config_data, f) - except Exception as e: - if self._file_content: - f.seek(0) - f.truncate() - f.write(self._file_content) - raise e - diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py new file mode 100644 index 0000000..248bdc1 --- /dev/null +++ b/src/koi_net/config/core.py @@ -0,0 +1,71 @@ +import os +from pydantic import BaseModel, model_validator +from dotenv import load_dotenv +from rid_lib import RIDType +from rid_lib.types import KoiNetNode + +from koi_net.protocol.secure import PrivateKey +from ..protocol.node import NodeProfile + + +class NodeContact(BaseModel): + rid: KoiNetNode | None = None + url: str | None = None + +class KoiNetConfig(BaseModel): + """Config for KOI-net.""" + + node_name: str + node_rid: KoiNetNode | None = None + node_profile: NodeProfile + + rid_types_of_interest: list[RIDType] = [KoiNetNode] + + cache_directory_path: str = ".rid_cache" + event_queues_path: str = "event_queues.json" + private_key_pem_path: str = "priv_key.pem" + + first_contact: NodeContact = NodeContact() + +class EnvConfig(BaseModel): + """Config for environment variables. + + Values set in the config are the variables names, and are loaded + from the environment at runtime. For example, if the config YAML + sets `priv_key_password: PRIV_KEY_PASSWORD` accessing + `priv_key_password` would retrieve the value of `PRIV_KEY_PASSWORD` + from the environment. + """ + + priv_key_password: str = "PRIV_KEY_PASSWORD" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + load_dotenv() + + def __getattribute__(self, name): + value = super().__getattribute__(name) + if name in type(self).model_fields: + env_val = os.getenv(value) + if env_val is None: + raise ValueError(f"Required environment variable {value} not set") + return env_val + return value + +class NodeConfig(BaseModel): + koi_net: KoiNetConfig + env: EnvConfig = EnvConfig() + + @model_validator(mode="after") + def generate_rid_cascade(self): + if not self.koi_net.node_rid: + priv_key = PrivateKey.generate() + pub_key = priv_key.public_key() + + self.koi_net.node_rid = pub_key.to_node_rid(self.koi_net.node_name) + + with open(self.koi_net.private_key_pem_path, "w") as f: + f.write(priv_key.to_pem(self.env.priv_key_password)) + + self.koi_net.node_profile.public_key = pub_key.to_der() + return self \ No newline at end of file diff --git a/src/koi_net/config/full_node.py b/src/koi_net/config/full_node.py new file mode 100644 index 0000000..6fffdfc --- /dev/null +++ b/src/koi_net/config/full_node.py @@ -0,0 +1,31 @@ +from pydantic import BaseModel, model_validator +from koi_net.config.core import NodeConfig, KoiNetConfig as BaseKoiNetConfig +from ..protocol.node import NodeProfile as BaseNodeProfile, NodeType, NodeProvides + + +class NodeProfile(BaseNodeProfile): + node_type: NodeType = NodeType.FULL + +class KoiNetConfig(BaseKoiNetConfig): + node_profile: NodeProfile + +class ServerConfig(BaseModel): + """Config for the node server (full node only).""" + + host: str = "127.0.0.1" + port: int = 8000 + path: str | None = "/koi-net" + + @property + def url(self) -> str: + return f"http://{self.host}:{self.port}{self.path or ''}" + +class FullNodeConfig(NodeConfig): + koi_net: KoiNetConfig + server: ServerConfig = ServerConfig() + + @model_validator(mode="after") + def check_url(self): + if not self.koi_net.node_profile.base_url: + self.koi_net.node_profile.base_url = self.server.url + 
return self diff --git a/src/koi_net/config/loader.py b/src/koi_net/config/loader.py new file mode 100644 index 0000000..70db14d --- /dev/null +++ b/src/koi_net/config/loader.py @@ -0,0 +1,51 @@ +from ruamel.yaml import YAML +from .core import NodeConfig + + +class ConfigLoader: + _config: NodeConfig + + _file_path: str = "config.yaml" + _file_content: str + + def __init__(self, config_cls: type[NodeConfig]): + self._config_cls = config_cls + self.load_from_yaml() + + def __getattr__(self, name): + return getattr(self._config, name) + + def load_from_yaml(self): + """Loads config state from YAML file. + + Defaults to `config.yaml`. If `generate_missing` is set to + `True`, a private key and RID will be generated if not already + present in the config. + """ + yaml = YAML() + + try: + with open(self._file_path, "r") as f: + self._file_content = f.read() + config_data = yaml.load(self._file_content) + self._config = self._config_cls.model_validate(config_data) + + except FileNotFoundError: + self._config = self._config_cls() + + self.save_to_yaml() + + + def save_to_yaml(self): + yaml = YAML() + + with open(self._file_path, "w") as f: + try: + config_data = self._config.model_dump(mode="json") + yaml.dump(config_data, f) + except Exception as e: + if self._file_content: + f.seek(0) + f.truncate() + f.write(self._file_content) + raise e \ No newline at end of file diff --git a/src/koi_net/config/partial_node.py b/src/koi_net/config/partial_node.py new file mode 100644 index 0000000..1554126 --- /dev/null +++ b/src/koi_net/config/partial_node.py @@ -0,0 +1,18 @@ +from pydantic import BaseModel +from koi_net.config.core import NodeConfig, KoiNetConfig +from ..protocol.node import NodeProfile, NodeType, NodeProvides + + +class NodeProfile(NodeProfile): + base_url: str | None = None + node_type: NodeType = NodeType.PARTIAL + +class KoiNetConfig(KoiNetConfig): + node_profile: NodeProfile + +class PollerConfig(BaseModel): + polling_interval: int = 5 + +class PartialNodeConfig(NodeConfig): + koi_net: KoiNetConfig + poller: PollerConfig = PollerConfig() \ No newline at end of file diff --git a/src/koi_net/context.py b/src/koi_net/context.py index a1713df..3127172 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/context.py @@ -2,7 +2,7 @@ from koi_net.effector import Effector from koi_net.network.resolver import NetworkResolver -from .config import NodeConfig +from .config.core import NodeConfig from .network.graph import NetworkGraph from .network.event_queue import EventQueue from .network.request_handler import RequestHandler diff --git a/src/koi_net/core.py b/src/koi_net/core.py index aa976f5..c87a87c 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,6 +1,8 @@ from rid_lib.ext import Cache + +from .config.loader import ConfigLoader from .assembler import NodeAssembler -from .config.base import BaseConfig +from .config.core import NodeConfig from .context import HandlerContext from .effector import Effector from .handshaker import Handshaker @@ -33,19 +35,18 @@ # factory functions for components with non standard initializiation -def make_config() -> BaseConfig: - return BaseConfig.load_from_yaml() -def make_cache(config: BaseConfig) -> Cache: +def make_cache(config: NodeConfig) -> Cache: return Cache(directory_path=config.koi_net.cache_directory_path) class BaseNode(NodeAssembler): - config = lambda: None + config_cls = NodeConfig kobj_queue = KobjQueue event_queue = EventQueue poll_event_buf = PollEventBuffer - knowledge_handlers = lambda: [ + config = ConfigLoader + 
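# A small sketch of the ConfigLoader pattern above: the loader owns the file
# I/O and validation, wraps a pydantic config class, and forwards attribute
# access to the loaded model via __getattr__. Everything below (SketchConfig,
# sketch-config.yaml) is illustrative; only the shape mirrors ConfigLoader,
# and the error-recovery branch of save_to_yaml is omitted for brevity.
from pydantic import BaseModel
from ruamel.yaml import YAML

class SketchConfig(BaseModel):
    node_name: str = "demo"
    cache_directory_path: str = ".rid_cache"

class SketchLoader:
    _file_path = "sketch-config.yaml"

    def __init__(self, config_cls: type[SketchConfig]):
        self._config_cls = config_cls
        self.load_from_yaml()

    def __getattr__(self, name):
        # only reached for names not found on the loader itself
        return getattr(self._config, name)

    def load_from_yaml(self):
        yaml = YAML()
        try:
            with open(self._file_path) as f:
                self._config = self._config_cls.model_validate(yaml.load(f))
        except FileNotFoundError:
            self._config = self._config_cls()
        self.save_to_yaml()

    def save_to_yaml(self):
        yaml = YAML()
        with open(self._file_path, "w") as f:
            yaml.dump(self._config.model_dump(mode="json"), f)

if __name__ == "__main__":
    loader = SketchLoader(SketchConfig)
    print(loader.node_name)  # attribute access is proxied to the wrapped model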
knowledge_handlers = [ basic_rid_handler, basic_manifest_handler, secure_profile_handler, @@ -70,7 +71,6 @@ class BaseNode(NodeAssembler): event_worker = EventProcessingWorker lifecycle = NodeLifecycle - class FullNode(BaseNode): entrypoint = NodeServer diff --git a/src/koi_net/entrypoint.py b/src/koi_net/entrypoint.py new file mode 100644 index 0000000..867c8c1 --- /dev/null +++ b/src/koi_net/entrypoint.py @@ -0,0 +1,11 @@ +from koi_net.config.core import NodeConfig + + +class EntryPoint: + def __init__(self, config: NodeConfig): + self.config = config + + def run(self): ... + + def initialize(self): + self.config.load_from_yaml() \ No newline at end of file diff --git a/src/koi_net/identity.py b/src/koi_net/identity.py index 0a2d5bf..38e1a8e 100644 --- a/src/koi_net/identity.py +++ b/src/koi_net/identity.py @@ -1,6 +1,6 @@ import structlog from rid_lib.types.koi_net_node import KoiNetNode -from .config import NodeConfig +from .config.core import NodeConfig from .protocol.node import NodeProfile log = structlog.stdlib.get_logger() diff --git a/src/koi_net/interfaces/entrypoint.py b/src/koi_net/interfaces/entrypoint.py index ab46e97..a315ff0 100644 --- a/src/koi_net/interfaces/entrypoint.py +++ b/src/koi_net/interfaces/entrypoint.py @@ -2,4 +2,6 @@ class EntryPoint(Protocol): - def run(self): ... \ No newline at end of file + def run(self): ... + + def initialize(self): ... \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 771ae8e..b5df7b8 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -11,7 +11,7 @@ from .processor.event_worker import EventProcessingWorker from .protocol.api_models import ErrorResponse from .interfaces.worker import STOP_WORKER -from .config import NodeConfig +from .config.core import NodeConfig from .processor.kobj_queue import KobjQueue from .network.graph import NetworkGraph from .identity import NodeIdentity diff --git a/src/koi_net/network/resolver.py b/src/koi_net/network/resolver.py index 9264123..ef61d7a 100644 --- a/src/koi_net/network/resolver.py +++ b/src/koi_net/network/resolver.py @@ -11,7 +11,7 @@ from ..protocol.event import Event from ..protocol.api_models import ErrorResponse from ..identity import NodeIdentity -from ..config import NodeConfig +from ..config.core import NodeConfig log = structlog.stdlib.get_logger() diff --git a/src/koi_net/poller.py b/src/koi_net/poller.py index 544411a..9f4d577 100644 --- a/src/koi_net/poller.py +++ b/src/koi_net/poller.py @@ -2,11 +2,11 @@ import time import structlog -from koi_net.interfaces.entrypoint import EntryPoint +from .entrypoint import EntryPoint from .processor.kobj_queue import KobjQueue from .lifecycle import NodeLifecycle from .network.resolver import NetworkResolver -from .config import NodeConfig +from .config.core import NodeConfig log = structlog.stdlib.get_logger() diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/processor/event_worker.py index 3e25010..cdad7bc 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/processor/event_worker.py @@ -6,7 +6,7 @@ from rid_lib.ext import Cache from rid_lib.types import KoiNetNode -from koi_net.config import NodeConfig +from koi_net.config.core import NodeConfig from koi_net.network.event_queue import EventQueue, QueuedEvent from koi_net.network.request_handler import RequestHandler from koi_net.network.poll_event_buffer import PollEventBuffer diff --git a/src/koi_net/protocol/node.py b/src/koi_net/protocol/node.py index 20a6fc0..c60d9d2 100644 --- 
a/src/koi_net/protocol/node.py +++ b/src/koi_net/protocol/node.py @@ -1,5 +1,5 @@ from enum import StrEnum -from pydantic import BaseModel, Field +from pydantic import BaseModel from rid_lib import RIDType @@ -8,8 +8,8 @@ class NodeType(StrEnum): PARTIAL = "PARTIAL" class NodeProvides(BaseModel): - event: list[RIDType] = Field(default_factory=list) - state: list[RIDType] = Field(default_factory=list) + event: list[RIDType] = [] + state: list[RIDType] = [] class NodeProfile(BaseModel): base_url: str | None = None diff --git a/src/koi_net/protocol/secure.py b/src/koi_net/protocol/secure.py index 575089e..c8387f4 100644 --- a/src/koi_net/protocol/secure.py +++ b/src/koi_net/protocol/secure.py @@ -1,3 +1,4 @@ +from rid_lib.types import KoiNetNode import structlog from base64 import b64decode, b64encode from cryptography.hazmat.primitives import hashes @@ -134,6 +135,11 @@ def to_der(self) -> str: ) ).decode() + def to_node_rid(self, name) -> KoiNetNode: + return KoiNetNode( + name=name, + hash=sha256_hash(self.to_der()) + ) def verify(self, signature: str, message: bytes) -> bool: # hashed_message = sha256_hash(message.decode()) diff --git a/src/koi_net/secure.py b/src/koi_net/secure.py index 367a888..62a5978 100644 --- a/src/koi_net/secure.py +++ b/src/koi_net/secure.py @@ -18,7 +18,7 @@ InvalidSignatureError, InvalidTargetError ) -from .config import NodeConfig +from .config.core import NodeConfig log = structlog.stdlib.get_logger() diff --git a/src/koi_net/sentry.py b/src/koi_net/sentry.py deleted file mode 100644 index 822a14d..0000000 --- a/src/koi_net/sentry.py +++ /dev/null @@ -1,13 +0,0 @@ -# import sentry_sdk -# from sentry_sdk.integrations.logging import LoggingIntegration - -# sentry_sdk.init( -# dsn="https://7bbafef3c7dbd652506db3cb2aca9f98@o4510149352357888.ingest.us.sentry.io/4510149355765760", -# # Add data like request headers and IP for users, -# # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info -# send_default_pii=True, -# enable_logs=True, -# integrations=[ -# LoggingIntegration(sentry_logs_level=None) -# ] -# ) \ No newline at end of file diff --git a/src/koi_net/server.py b/src/koi_net/server.py index fef170d..5bb5f7d 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/server.py @@ -4,14 +4,13 @@ from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse -from koi_net.interfaces.entrypoint import EntryPoint - +from .entrypoint import EntryPoint from .network.response_handler import ResponseHandler from .protocol.model_map import API_MODEL_MAP from .protocol.api_models import ErrorResponse from .protocol.errors import ProtocolError from .lifecycle import NodeLifecycle -from .config import NodeConfig +from .config.core import NodeConfig log = structlog.stdlib.get_logger() From 5a64a92bb8582ffbe2ec3444092bebfbfb41dc57 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 21 Oct 2025 22:02:20 -0400 Subject: [PATCH 19/53] restructuring and renaming, queues and buffer use 'push' instead of redundant push_kobj/push_event_to --- examples/coordinator.py | 8 +++--- pyproject.toml | 2 +- src/koi_net/assembler.py | 2 +- src/koi_net/config/loader.py | 7 +---- src/koi_net/core.py | 26 +++++++------------ src/koi_net/default_actions.py | 3 +-- src/koi_net/effector.py | 2 +- src/koi_net/entrypoint.py | 11 -------- src/koi_net/entrypoints/__init__.py | 2 ++ src/koi_net/entrypoints/base.py | 5 ++++ src/koi_net/{ => entrypoints}/poller.py | 18 ++++++------- src/koi_net/{ => entrypoints}/server.py | 18 ++++++------- 
src/koi_net/handshaker.py | 4 +-- src/koi_net/interfaces/entrypoint.py | 7 ----- src/koi_net/lifecycle.py | 10 +++---- src/koi_net/network/error_handler.py | 2 +- src/koi_net/network/event_queue.py | 2 +- src/koi_net/network/poll_event_buffer.py | 2 +- src/koi_net/network/response_handler.py | 2 +- src/koi_net/{ => processor}/context.py | 12 ++++----- src/koi_net/processor/handler.py | 5 +++- .../{handlers.py => knowledge_handlers.py} | 12 ++++----- src/koi_net/processor/knowledge_object.py | 5 ++-- src/koi_net/processor/kobj_queue.py | 2 +- src/koi_net/processor/pipeline.py | 4 +-- src/koi_net/secure.py | 1 - src/koi_net/workers/__init__.py | 2 ++ .../{interfaces/worker.py => workers/base.py} | 0 .../{processor => workers}/event_worker.py | 16 ++++++------ .../{processor => workers}/kobj_worker.py | 6 ++--- 30 files changed, 88 insertions(+), 110 deletions(-) delete mode 100644 src/koi_net/entrypoint.py create mode 100644 src/koi_net/entrypoints/__init__.py create mode 100644 src/koi_net/entrypoints/base.py rename src/koi_net/{ => entrypoints}/poller.py (71%) rename src/koi_net/{ => entrypoints}/server.py (88%) delete mode 100644 src/koi_net/interfaces/entrypoint.py rename src/koi_net/{ => processor}/context.py (81%) rename src/koi_net/processor/{handlers.py => knowledge_handlers.py} (96%) create mode 100644 src/koi_net/workers/__init__.py rename src/koi_net/{interfaces/worker.py => workers/base.py} (100%) rename src/koi_net/{processor => workers}/event_worker.py (89%) rename src/koi_net/{processor => workers}/kobj_worker.py (88%) diff --git a/examples/coordinator.py b/examples/coordinator.py index f66cce3..ad79c7b 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -9,7 +9,7 @@ NodeProvides ) from koi_net.core import FullNode -from koi_net.context import HandlerContext +from koi_net.processor.context import HandlerContext from koi_net.processor.handler import HandlerType, KnowledgeHandler from koi_net.processor.knowledge_object import KnowledgeObject from koi_net.protocol.event import Event, EventType @@ -51,7 +51,7 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): logger.info("Sharing this node's bundle with peer") identity_bundle = ctx.cache.read(ctx.identity.rid) - ctx.event_queue.push_event_to( + ctx.event_queue.push( event=Event.from_bundle(EventType.NEW, identity_bundle), target=kobj.rid ) @@ -66,8 +66,8 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): rid_types=[KoiNetNode, KoiNetEdge] ) - ctx.kobj_queue.put_kobj(rid=edge_bundle.rid, event_type=EventType.FORGET) - ctx.kobj_queue.put_kobj(bundle=edge_bundle) + ctx.kobj_queue.push(rid=edge_bundle.rid, event_type=EventType.FORGET) + ctx.kobj_queue.push(bundle=edge_bundle) class CoordinatorNode(FullNode): config = CoordinatorConfig diff --git a/pyproject.toml b/pyproject.toml index b4653a8..975029a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" -version = "1.2.0b2" +version = "1.2.0b3" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index ffcd723..b528f90 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -5,7 +5,7 @@ from pydantic import BaseModel import structlog -from .interfaces.entrypoint import EntryPoint +from .entrypoints.base import EntryPoint log = structlog.stdlib.get_logger() diff --git a/src/koi_net/config/loader.py 
b/src/koi_net/config/loader.py index 70db14d..92ceced 100644 --- a/src/koi_net/config/loader.py +++ b/src/koi_net/config/loader.py @@ -16,12 +16,7 @@ def __getattr__(self, name): return getattr(self._config, name) def load_from_yaml(self): - """Loads config state from YAML file. - - Defaults to `config.yaml`. If `generate_missing` is set to - `True`, a private key and RID will be generated if not already - present in the config. - """ + """Loads config state from YAML file.""" yaml = YAML() try: diff --git a/src/koi_net/core.py b/src/koi_net/core.py index c87a87c..692a1c3 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -3,11 +3,11 @@ from .config.loader import ConfigLoader from .assembler import NodeAssembler from .config.core import NodeConfig -from .context import HandlerContext +from .processor.context import HandlerContext from .effector import Effector from .handshaker import Handshaker from .identity import NodeIdentity -from .processor.kobj_worker import KnowledgeProcessingWorker +from .workers import KnowledgeProcessingWorker, EventProcessingWorker from .lifecycle import NodeLifecycle from .network.error_handler import ErrorHandler from .network.event_queue import EventQueue @@ -16,8 +16,11 @@ from .network.resolver import NetworkResolver from .network.response_handler import ResponseHandler from .network.poll_event_buffer import PollEventBuffer -from .poller import NodePoller -from .processor.handlers import ( +from .processor.pipeline import KnowledgePipeline +from .processor.kobj_queue import KobjQueue +from .secure import Secure +from .entrypoints import NodeServer, NodePoller +from .processor.knowledge_handlers import ( basic_manifest_handler, basic_network_output_filter, basic_rid_handler, @@ -26,18 +29,6 @@ forget_edge_on_node_deletion, secure_profile_handler ) -from .processor.event_worker import EventProcessingWorker -from .processor.pipeline import KnowledgePipeline -from .processor.kobj_queue import KobjQueue -from .secure import Secure -from .server import NodeServer - - -# factory functions for components with non standard initializiation - - -def make_cache(config: NodeConfig) -> Cache: - return Cache(directory_path=config.koi_net.cache_directory_path) class BaseNode(NodeAssembler): @@ -55,7 +46,8 @@ class BaseNode(NodeAssembler): basic_network_output_filter, forget_edge_on_node_deletion ] - cache = make_cache + cache = lambda config: Cache( + directory_path=config.koi_net.cache_directory_path) identity = NodeIdentity graph = NetworkGraph secure = Secure diff --git a/src/koi_net/default_actions.py b/src/koi_net/default_actions.py index cd0550c..52d1a56 100644 --- a/src/koi_net/default_actions.py +++ b/src/koi_net/default_actions.py @@ -1,9 +1,8 @@ """Implementations of default dereference actions.""" -from .context import ActionContext from rid_lib.types import KoiNetNode from rid_lib.ext import Bundle -from .effector import Effector +from .effector import Effector, ActionContext @Effector.register_default_action(KoiNetNode) diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index f5871a1..16b1449 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -155,7 +155,7 @@ def deref( and bundle is not None and source != BundleSource.CACHE ): - self.kobj_queue.put_kobj( + self.kobj_queue.push( bundle=bundle, source=source if type(source) is KoiNetNode else None ) diff --git a/src/koi_net/entrypoint.py b/src/koi_net/entrypoint.py deleted file mode 100644 index 867c8c1..0000000 --- a/src/koi_net/entrypoint.py +++ /dev/null @@ -1,11 
+0,0 @@ -from koi_net.config.core import NodeConfig - - -class EntryPoint: - def __init__(self, config: NodeConfig): - self.config = config - - def run(self): ... - - def initialize(self): - self.config.load_from_yaml() \ No newline at end of file diff --git a/src/koi_net/entrypoints/__init__.py b/src/koi_net/entrypoints/__init__.py new file mode 100644 index 0000000..e3d40bf --- /dev/null +++ b/src/koi_net/entrypoints/__init__.py @@ -0,0 +1,2 @@ +from .poller import NodePoller +from .server import NodeServer \ No newline at end of file diff --git a/src/koi_net/entrypoints/base.py b/src/koi_net/entrypoints/base.py new file mode 100644 index 0000000..3e58fd2 --- /dev/null +++ b/src/koi_net/entrypoints/base.py @@ -0,0 +1,5 @@ +from koi_net.config.core import NodeConfig + + +class EntryPoint: + def run(self): ... \ No newline at end of file diff --git a/src/koi_net/poller.py b/src/koi_net/entrypoints/poller.py similarity index 71% rename from src/koi_net/poller.py rename to src/koi_net/entrypoints/poller.py index 9f4d577..35c6d2d 100644 --- a/src/koi_net/poller.py +++ b/src/koi_net/entrypoints/poller.py @@ -2,11 +2,11 @@ import time import structlog -from .entrypoint import EntryPoint -from .processor.kobj_queue import KobjQueue -from .lifecycle import NodeLifecycle -from .network.resolver import NetworkResolver -from .config.core import NodeConfig +from .base import EntryPoint +from ..processor.kobj_queue import KobjQueue +from ..lifecycle import NodeLifecycle +from ..network.resolver import NetworkResolver +from ..config.partial_node import PartialNodeConfig log = structlog.stdlib.get_logger() @@ -16,11 +16,11 @@ class NodePoller(EntryPoint): kobj_queue: KobjQueue lifecycle: NodeLifecycle resolver: NetworkResolver - config: NodeConfig + config: PartialNodeConfig def __init__( self, - config: NodeConfig, + config: PartialNodeConfig, lifecycle: NodeLifecycle, kobj_queue: KobjQueue, resolver: NetworkResolver, @@ -35,7 +35,7 @@ def poll(self): neighbors = self.resolver.poll_neighbors() for node_rid in neighbors: for event in neighbors[node_rid]: - self.kobj_queue.put_kobj(event=event, source=node_rid) + self.kobj_queue.push(event=event, source=node_rid) def run(self): """Runs polling event loop.""" @@ -44,6 +44,6 @@ def run(self): start_time = time.time() self.poll() elapsed = time.time() - start_time - sleep_time = self.config.koi_net.polling_interval - elapsed + sleep_time = self.config.poller.polling_interval - elapsed if sleep_time > 0: time.sleep(sleep_time) \ No newline at end of file diff --git a/src/koi_net/server.py b/src/koi_net/entrypoints/server.py similarity index 88% rename from src/koi_net/server.py rename to src/koi_net/entrypoints/server.py index 5bb5f7d..c299f9a 100644 --- a/src/koi_net/server.py +++ b/src/koi_net/entrypoints/server.py @@ -4,20 +4,20 @@ from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse -from .entrypoint import EntryPoint -from .network.response_handler import ResponseHandler -from .protocol.model_map import API_MODEL_MAP -from .protocol.api_models import ErrorResponse -from .protocol.errors import ProtocolError -from .lifecycle import NodeLifecycle -from .config.core import NodeConfig +from .base import EntryPoint +from ..network.response_handler import ResponseHandler +from ..protocol.model_map import API_MODEL_MAP +from ..protocol.api_models import ErrorResponse +from ..protocol.errors import ProtocolError +from ..lifecycle import NodeLifecycle +from ..config.full_node import FullNodeConfig log = structlog.stdlib.get_logger() 
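# The NodePoller.run loop above keeps a fixed cadence by subtracting the time
# spent polling from the configured interval before sleeping. A stripped-down
# sketch of that scheduling pattern; poll_once and INTERVAL are placeholders
# standing in for resolver polling and config.poller.polling_interval.
import time

INTERVAL = 5  # seconds

def poll_once():
    ...  # fetch events from neighbors and push them onto the kobj queue

def run_polling_loop(iterations: int = 3):
    for _ in range(iterations):
        start = time.time()
        poll_once()
        sleep_time = INTERVAL - (time.time() - start)
        if sleep_time > 0:
            time.sleep(sleep_time)

if __name__ == "__main__":
    run_polling_loop(iterations=1)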
class NodeServer(EntryPoint): """Manages FastAPI server and event handling for full nodes.""" - config: NodeConfig + config: FullNodeConfig lifecycle: NodeLifecycle response_handler: ResponseHandler app: FastAPI @@ -25,7 +25,7 @@ class NodeServer(EntryPoint): def __init__( self, - config: NodeConfig, + config: FullNodeConfig, lifecycle: NodeLifecycle, response_handler: ResponseHandler, ): diff --git a/src/koi_net/handshaker.py b/src/koi_net/handshaker.py index be6bfe7..f738446 100644 --- a/src/koi_net/handshaker.py +++ b/src/koi_net/handshaker.py @@ -25,13 +25,13 @@ def handshake_with(self, target: KoiNetNode): reset the target's cache in case it already knew this node. """ log.debug(f"Initiating handshake with {target}") - self.event_queue.push_event_to( + self.event_queue.push( Event.from_rid( event_type=EventType.FORGET, rid=self.identity.rid), target=target ) - self.event_queue.push_event_to( + self.event_queue.push( event=Event.from_bundle( event_type=EventType.NEW, bundle=self.cache.read(self.identity.rid)), diff --git a/src/koi_net/interfaces/entrypoint.py b/src/koi_net/interfaces/entrypoint.py deleted file mode 100644 index a315ff0..0000000 --- a/src/koi_net/interfaces/entrypoint.py +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Protocol - - -class EntryPoint(Protocol): - def run(self): ... - - def initialize(self): ... \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index b5df7b8..f648b98 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -6,11 +6,11 @@ from .handshaker import Handshaker from .network.request_handler import RequestHandler -from .processor.kobj_worker import KnowledgeProcessingWorker +from .workers.kobj_worker import KnowledgeProcessingWorker from .network.event_queue import EventQueue -from .processor.event_worker import EventProcessingWorker +from .workers import EventProcessingWorker from .protocol.api_models import ErrorResponse -from .interfaces.worker import STOP_WORKER +from .workers.base import STOP_WORKER from .config.core import NodeConfig from .processor.kobj_queue import KobjQueue from .network.graph import NetworkGraph @@ -99,7 +99,7 @@ def start(self): # refresh to reflect changes (if any) in config.yaml - self.kobj_queue.put_kobj(bundle=Bundle.generate( + self.kobj_queue.push(bundle=Bundle.generate( rid=self.identity.rid, contents=self.identity.profile.model_dump() )) @@ -119,7 +119,7 @@ def start(self): continue for manifest in payload.manifests: - self.kobj_queue.put_kobj( + self.kobj_queue.push( manifest=manifest, source=coordinator ) diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index 0e7fd47..0ca558c 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -31,7 +31,7 @@ def handle_connection_error(self, node: KoiNetNode): if self.timeout_counter[node] > 3: log.debug(f"Exceeded time out limit, forgetting node") - self.kobj_queue.put_kobj(rid=node, event_type=EventType.FORGET) + self.kobj_queue.push(rid=node, event_type=EventType.FORGET) # do something diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index 8db2db1..53e9feb 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -20,7 +20,7 @@ class EventQueue: def __init__(self): self.q = Queue() - def push_event_to(self, event: Event, target: KoiNetNode): + def push(self, event: Event, target: KoiNetNode): """Pushes event to queue of specified node. 
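# Sketch of the handshake sequence implemented by Handshaker.handshake_with
# above: a FORGET for this node's RID is pushed first so a peer that already
# knew us drops its stale copy, then a NEW event carrying our current bundle
# introduces the node. The function below assumes already-built node
# components (cache, identity, event_queue) as wired by the assembler; it is
# illustrative, not a replacement for the Handshaker component.
from rid_lib.types import KoiNetNode
from koi_net.protocol.event import Event, EventType

def sketch_handshake(cache, identity, event_queue, target: KoiNetNode):
    # 1. ask the peer to forget any stale record of this node
    event_queue.push(
        Event.from_rid(event_type=EventType.FORGET, rid=identity.rid),
        target=target,
    )
    # 2. introduce this node with its current bundle
    event_queue.push(
        event=Event.from_bundle(
            event_type=EventType.NEW,
            bundle=cache.read(identity.rid),
        ),
        target=target,
    )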
Event will be sent to webhook or poll queue depending on the diff --git a/src/koi_net/network/poll_event_buffer.py b/src/koi_net/network/poll_event_buffer.py index d26d2a6..310212e 100644 --- a/src/koi_net/network/poll_event_buffer.py +++ b/src/koi_net/network/poll_event_buffer.py @@ -9,7 +9,7 @@ class PollEventBuffer: def __init__(self): self.buffers = dict() - def put(self, node: KoiNetNode, event: Event): + def push(self, node: KoiNetNode, event: Event): event_buf = self.buffers.setdefault(node, []) event_buf.append(event) diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index 9a7fbe7..ff0656c 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -70,7 +70,7 @@ def broadcast_events_handler(self, req: EventsPayload, source: KoiNetNode): log.info(f"Request to broadcast events, received {len(req.events)} event(s)") for event in req.events: - self.kobj_queue.put_kobj(event=event, source=source) + self.kobj_queue.push(event=event, source=source) def poll_events_handler( self, diff --git a/src/koi_net/context.py b/src/koi_net/processor/context.py similarity index 81% rename from src/koi_net/context.py rename to src/koi_net/processor/context.py index 3127172..bfa5f1d 100644 --- a/src/koi_net/context.py +++ b/src/koi_net/processor/context.py @@ -2,12 +2,12 @@ from koi_net.effector import Effector from koi_net.network.resolver import NetworkResolver -from .config.core import NodeConfig -from .network.graph import NetworkGraph -from .network.event_queue import EventQueue -from .network.request_handler import RequestHandler -from .identity import NodeIdentity -from .processor.kobj_queue import KobjQueue +from ..config.core import NodeConfig +from ..network.graph import NetworkGraph +from ..network.event_queue import EventQueue +from ..network.request_handler import RequestHandler +from ..identity import NodeIdentity +from .kobj_queue import KobjQueue class HandlerContext: diff --git a/src/koi_net/processor/handler.py b/src/koi_net/processor/handler.py index 6ded184..9bdb07e 100644 --- a/src/koi_net/processor/handler.py +++ b/src/koi_net/processor/handler.py @@ -2,7 +2,10 @@ from enum import StrEnum from typing import Callable from rid_lib import RIDType + from ..protocol.event import EventType +from .knowledge_object import KnowledgeObject +from .context import HandlerContext class StopChain: @@ -32,7 +35,7 @@ class HandlerType(StrEnum): class KnowledgeHandler: """Handles knowledge processing events of the provided types.""" - func: Callable + func: Callable[[HandlerContext, KnowledgeObject], None | KnowledgeObject | StopChain] handler_type: HandlerType rid_types: list[RIDType] | None event_types: list[EventType | None] | None = None diff --git a/src/koi_net/processor/handlers.py b/src/koi_net/processor/knowledge_handlers.py similarity index 96% rename from src/koi_net/processor/handlers.py rename to src/koi_net/processor/knowledge_handlers.py index 0ac899e..f90314e 100644 --- a/src/koi_net/processor/handlers.py +++ b/src/koi_net/processor/knowledge_handlers.py @@ -7,7 +7,7 @@ from koi_net.protocol.node import NodeType from .handler import KnowledgeHandler, HandlerType, STOP_CHAIN from .knowledge_object import KnowledgeObject -from ..context import HandlerContext +from .context import HandlerContext from ..protocol.event import Event, EventType from ..protocol.edge import EdgeProfile, EdgeStatus, EdgeType from ..protocol.node import NodeProfile @@ -135,7 +135,7 @@ def edge_negotiation_handler(ctx: 
HandlerContext, kobj: KnowledgeObject): if abort: event = Event.from_rid(EventType.FORGET, kobj.rid) - ctx.event_queue.push_event_to(event, peer_rid, flush=True) + ctx.event_queue.push(event, peer_rid, flush=True) return STOP_CHAIN else: @@ -144,7 +144,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): edge_profile.status = EdgeStatus.APPROVED updated_bundle = Bundle.generate(kobj.rid, edge_profile.model_dump()) - ctx.kobj_queue.put_kobj(bundle=updated_bundle, event_type=EventType.UPDATE) + ctx.kobj_queue.push(bundle=updated_bundle, event_type=EventType.UPDATE) return elif edge_profile.target == ctx.identity.rid: @@ -216,7 +216,7 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): # queued for processing edge_bundle = Bundle.generate(edge_rid, edge_profile.model_dump()) - ctx.kobj_queue.put_kobj(bundle=edge_bundle) + ctx.kobj_queue.push(bundle=edge_bundle) log.info("Catching up on network state") @@ -234,7 +234,7 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): # marked as external since we are handling RIDs from another node # will fetch remotely instead of checking local cache - ctx.kobj_queue.put_kobj(rid=rid, source=kobj.rid) + ctx.kobj_queue.push(rid=rid, source=kobj.rid) log.info("Done") @@ -296,4 +296,4 @@ def forget_edge_on_node_deletion(ctx: HandlerContext, kobj: KnowledgeObject): if kobj.rid in (edge_profile.source, edge_profile.target): log.debug("Identified edge with forgotten node") - ctx.kobj_queue.put_kobj(rid=edge_rid, event_type=EventType.FORGET) \ No newline at end of file + ctx.kobj_queue.push(rid=edge_rid, event_type=EventType.FORGET) \ No newline at end of file diff --git a/src/koi_net/processor/knowledge_object.py b/src/koi_net/processor/knowledge_object.py index 82713b3..0c8c240 100644 --- a/src/koi_net/processor/knowledge_object.py +++ b/src/koi_net/processor/knowledge_object.py @@ -1,8 +1,7 @@ from pydantic import BaseModel from rid_lib import RID -from rid_lib.ext import Manifest -from rid_lib.ext.bundle import Bundle -from rid_lib.types.koi_net_node import KoiNetNode +from rid_lib.ext import Manifest, Bundle +from rid_lib.types import KoiNetNode from ..protocol.event import Event, EventType diff --git a/src/koi_net/processor/kobj_queue.py b/src/koi_net/processor/kobj_queue.py index 1ed55b6..e3a44ae 100644 --- a/src/koi_net/processor/kobj_queue.py +++ b/src/koi_net/processor/kobj_queue.py @@ -16,7 +16,7 @@ class KobjQueue: def __init__(self): self.q = Queue() - def put_kobj( + def push( self, rid: RID | None = None, manifest: Manifest | None = None, diff --git a/src/koi_net/processor/pipeline.py b/src/koi_net/processor/pipeline.py index 0e6fd81..576fa6d 100644 --- a/src/koi_net/processor/pipeline.py +++ b/src/koi_net/processor/pipeline.py @@ -13,7 +13,7 @@ StopChain ) from .knowledge_object import KnowledgeObject -from ..context import HandlerContext +from .context import HandlerContext log = structlog.stdlib.get_logger() @@ -193,6 +193,6 @@ def process(self, kobj: KnowledgeObject): log.debug("No network targets set") for node in kobj.network_targets: - self.event_queue.push_event_to(kobj.normalized_event, node) + self.event_queue.push(kobj.normalized_event, node) kobj = self.call_handler_chain(HandlerType.Final, kobj) diff --git a/src/koi_net/secure.py b/src/koi_net/secure.py index 62a5978..da89327 100644 --- a/src/koi_net/secure.py +++ b/src/koi_net/secure.py @@ -1,6 +1,5 @@ import structlog from functools import wraps - import cryptography.exceptions from rid_lib.ext import Bundle, Cache 
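# The put_kobj -> push rename above gives KobjQueue the same verb as
# EventQueue and PollEventBuffer. The queue accepts knowledge in several
# forms, as the call sites in this patch show; the snippet below just
# collects those patterns in one place and assumes an already-built node
# exposing kobj_queue through a HandlerContext (ctx).
from koi_net.protocol.event import EventType

def sketch_push_patterns(ctx, rid, bundle, manifest, event, peer):
    ctx.kobj_queue.push(rid=rid, event_type=EventType.FORGET)  # forget by RID
    ctx.kobj_queue.push(bundle=bundle)                         # ingest a full bundle
    ctx.kobj_queue.push(manifest=manifest, source=peer)        # manifest from a peer
    ctx.kobj_queue.push(event=event, source=peer)              # event from a peer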
from rid_lib.ext.utils import sha256_hash diff --git a/src/koi_net/workers/__init__.py b/src/koi_net/workers/__init__.py new file mode 100644 index 0000000..52330fd --- /dev/null +++ b/src/koi_net/workers/__init__.py @@ -0,0 +1,2 @@ +from .event_worker import EventProcessingWorker +from .kobj_worker import KnowledgeProcessingWorker \ No newline at end of file diff --git a/src/koi_net/interfaces/worker.py b/src/koi_net/workers/base.py similarity index 100% rename from src/koi_net/interfaces/worker.py rename to src/koi_net/workers/base.py diff --git a/src/koi_net/processor/event_worker.py b/src/koi_net/workers/event_worker.py similarity index 89% rename from src/koi_net/processor/event_worker.py rename to src/koi_net/workers/event_worker.py index cdad7bc..50f49e2 100644 --- a/src/koi_net/processor/event_worker.py +++ b/src/koi_net/workers/event_worker.py @@ -6,13 +6,13 @@ from rid_lib.ext import Cache from rid_lib.types import KoiNetNode -from koi_net.config.core import NodeConfig -from koi_net.network.event_queue import EventQueue, QueuedEvent -from koi_net.network.request_handler import RequestHandler -from koi_net.network.poll_event_buffer import PollEventBuffer -from koi_net.protocol.event import Event -from koi_net.protocol.node import NodeProfile, NodeType -from koi_net.interfaces.worker import ThreadWorker, STOP_WORKER +from ..config.core import NodeConfig +from ..network.event_queue import EventQueue, QueuedEvent +from ..network.request_handler import RequestHandler +from ..network.poll_event_buffer import PollEventBuffer +from ..protocol.event import Event +from ..protocol.node import NodeProfile, NodeType +from .base import ThreadWorker, STOP_WORKER log = structlog.stdlib.get_logger() @@ -63,7 +63,7 @@ def decide_event(self, item: QueuedEvent) -> bool: return True elif node_profile.node_type == NodeType.PARTIAL: - self.poll_event_buf.put(item.target, item.event) + self.poll_event_buf.push(item.target, item.event) return False elif item.target == self.config.koi_net.first_contact.rid: diff --git a/src/koi_net/processor/kobj_worker.py b/src/koi_net/workers/kobj_worker.py similarity index 88% rename from src/koi_net/processor/kobj_worker.py rename to src/koi_net/workers/kobj_worker.py index 5b35387..524e7cc 100644 --- a/src/koi_net/processor/kobj_worker.py +++ b/src/koi_net/workers/kobj_worker.py @@ -2,9 +2,9 @@ import traceback import structlog -from .pipeline import KnowledgePipeline -from .kobj_queue import KobjQueue -from koi_net.interfaces.worker import ThreadWorker, STOP_WORKER +from ..processor.pipeline import KnowledgePipeline +from ..processor.kobj_queue import KobjQueue +from .base import ThreadWorker, STOP_WORKER log = structlog.stdlib.get_logger() From 82ba5c6c70af043993763b76fca134af4a49b0d9 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 31 Oct 2025 13:49:24 -0400 Subject: [PATCH 20/53] moved config system into a node component, removed old logging system from example nodes --- .gitignore | 1 + examples/coordinator.py | 21 ++--- examples/partial.py | 16 +--- requirements.txt | 9 -- src/koi_net/__init__.py | 1 - src/koi_net/assembler.py | 2 +- src/koi_net/core.py | 2 + src/koi_net/log_system.py | 139 ++++++++++++++++++++++++++++++ src/koi_net/logger.py | 176 -------------------------------------- 9 files changed, 150 insertions(+), 217 deletions(-) delete mode 100644 requirements.txt create mode 100644 src/koi_net/log_system.py delete mode 100644 src/koi_net/logger.py diff --git a/.gitignore b/.gitignore index b5af389..1fb91bd 100644 --- a/.gitignore +++ b/.gitignore @@ 
-2,6 +2,7 @@ rid-lib __pycache__ *.pem *.yaml +*.ndjson* venv .env prototypes diff --git a/examples/coordinator.py b/examples/coordinator.py index ad79c7b..4ded18c 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -1,6 +1,5 @@ -import logging -from rich.logging import RichHandler from rid_lib.types import KoiNetNode, KoiNetEdge +import structlog from koi_net.config.full_node import ( FullNodeConfig, ServerConfig, @@ -15,15 +14,7 @@ from koi_net.protocol.event import Event, EventType from koi_net.protocol.edge import EdgeType, generate_edge_bundle -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - handlers=[RichHandler()] -) - -logging.getLogger("koi_net").setLevel(logging.DEBUG) -logger = logging.getLogger(__name__) +log = structlog.stdlib.get_logger() class CoordinatorConfig(FullNodeConfig): @@ -43,20 +34,20 @@ class CoordinatorConfig(FullNodeConfig): HandlerType.Network, rid_types=[KoiNetNode]) def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): - logger.info("Handling node handshake") + log.info("Handling node handshake") # only respond if node declares itself as NEW if kobj.event_type != EventType.NEW: return - logger.info("Sharing this node's bundle with peer") + log.info("Sharing this node's bundle with peer") identity_bundle = ctx.cache.read(ctx.identity.rid) ctx.event_queue.push( event=Event.from_bundle(EventType.NEW, identity_bundle), target=kobj.rid ) - logger.info("Proposing new edge") + log.info("Proposing new edge") # defer handling of proposed edge edge_bundle = generate_edge_bundle( @@ -70,7 +61,7 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): ctx.kobj_queue.push(bundle=edge_bundle) class CoordinatorNode(FullNode): - config = CoordinatorConfig + config_cls = CoordinatorConfig knowledge_handlers = FullNode.knowledge_handlers + [handshake_handler] if __name__ == "__main__": diff --git a/examples/partial.py b/examples/partial.py index f0f66fe..5c634ab 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -1,20 +1,7 @@ -import logging -from rich.logging import RichHandler from koi_net.config.partial_node import PartialNodeConfig, KoiNetConfig, NodeProfile from koi_net.core import PartialNode -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - handlers=[RichHandler()] -) - -logging.getLogger("koi_net").setLevel(logging.DEBUG) -logger = logging.getLogger(__name__) - - class MyPartialNodeConfig(PartialNodeConfig): koi_net: KoiNetConfig = KoiNetConfig( node_name="partial", @@ -24,7 +11,6 @@ class MyPartialNodeConfig(PartialNodeConfig): class MyPartialNode(PartialNode): config_cls = MyPartialNodeConfig - if __name__ == "__main__": node = MyPartialNode() - # node.entrypoint.run() \ No newline at end of file + node.entrypoint.run() \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 2c79f86..0000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -networkx>=3.4.2 -rid-lib>=3.2.1 -httpx>=0.28.1 -pydantic>=2.10.6 - -# requirements for examples/ -rich -fastapi -uvicorn \ No newline at end of file diff --git a/src/koi_net/__init__.py b/src/koi_net/__init__.py index 906398a..e69de29 100644 --- a/src/koi_net/__init__.py +++ b/src/koi_net/__init__.py @@ -1 +0,0 @@ -from . 
import logger diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index b528f90..a7cce7f 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -48,7 +48,7 @@ def _build(cls) -> NodeContainer: if comp is None: raise Exception(f"Couldn't find factory for component '{comp_name}'") - print(comp_name) + # print(comp_name) if not callable(comp): print(f"Treating {comp_name} as a literal") diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 692a1c3..c5521b0 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,5 +1,6 @@ from rid_lib.ext import Cache +from .log_system import LogSystem from .config.loader import ConfigLoader from .assembler import NodeAssembler from .config.core import NodeConfig @@ -32,6 +33,7 @@ class BaseNode(NodeAssembler): + log_system = LogSystem config_cls = NodeConfig kobj_queue = KobjQueue event_queue = EventQueue diff --git a/src/koi_net/log_system.py b/src/koi_net/log_system.py new file mode 100644 index 0000000..44e680d --- /dev/null +++ b/src/koi_net/log_system.py @@ -0,0 +1,139 @@ +import sys +import logging +from logging.handlers import RotatingFileHandler +from datetime import datetime +import structlog +import colorama + + +console_renderer = structlog.dev.ConsoleRenderer( + columns=[ + # Render the timestamp without the key name in yellow. + structlog.dev.Column( + "timestamp", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Style.DIM, + reset_style=colorama.Style.RESET_ALL, + value_repr=lambda t: datetime.fromisoformat(t).strftime("%Y-%m-%d %H:%M:%S"), + ), + ), + structlog.dev.Column( + "level", + structlog.dev.LogLevelColumnFormatter( + level_styles={ + level: colorama.Style.BRIGHT + color + for level, color in { + "critical": colorama.Fore.RED, + "exception": colorama.Fore.RED, + "error": colorama.Fore.RED, + "warn": colorama.Fore.YELLOW, + "warning": colorama.Fore.YELLOW, + "info": colorama.Fore.GREEN, + "debug": colorama.Fore.GREEN, + "notset": colorama.Back.RED, + }.items() + }, + reset_style=colorama.Style.RESET_ALL, + width=9 + ) + ), + # Render the event without the key name in bright magenta. + + # Default formatter for all keys not explicitly mentioned. The key is + # cyan, the value is green. 
+ structlog.dev.Column( + "path", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Fore.MAGENTA, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + width=30 + ), + ), + # structlog.dev.Column( + # "func_name", + # structlog.dev.KeyValueColumnFormatter( + # key_style=None, + # value_style=colorama.Fore.MAGENTA, + # reset_style=colorama.Style.RESET_ALL, + # value_repr=str, + # prefix="(", + # postfix=")", + # width=15 + # ), + # ), + structlog.dev.Column( + "event", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Fore.WHITE, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + width=30 + ), + ), + structlog.dev.Column( + "", + structlog.dev.KeyValueColumnFormatter( + key_style=colorama.Fore.BLUE, + value_style=colorama.Fore.GREEN, + reset_style=colorama.Style.RESET_ALL, + value_repr=str, + ), + ) + ] +) + +class LogSystem: + def __init__(self): + file_handler = RotatingFileHandler( + filename="log.ndjson", + maxBytes=10 * 1024 * 1024, + backupCount=5, + encoding="utf-8" + ) + file_handler.setFormatter( + structlog.stdlib.ProcessorFormatter( + processor=structlog.processors.JSONRenderer() + ) + ) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter( + structlog.stdlib.ProcessorFormatter( + processor=console_renderer + ) + ) + + logging.basicConfig( + level=logging.DEBUG, + handlers=[file_handler, console_handler] + ) + + structlog.configure( + processors=[ + structlog.stdlib.filter_by_level, + structlog.stdlib.add_logger_name, + structlog.stdlib.add_log_level, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.TimeStamper(fmt="iso"), + # structlog.processors.StackInfoRenderer(), + structlog.processors.UnicodeDecoder(), + structlog.processors.CallsiteParameterAdder({ + structlog.processors.CallsiteParameter.MODULE, + structlog.processors.CallsiteParameter.FUNC_NAME + }), + # lambda _, __, event: { + # **event, + # "path": event["module"] + "." + event["func_name"] + # }, + # console_renderer + structlog.stdlib.ProcessorFormatter.wrap_for_formatter + + ], + wrapper_class=structlog.stdlib.BoundLogger, + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, + ) \ No newline at end of file diff --git a/src/koi_net/logger.py b/src/koi_net/logger.py deleted file mode 100644 index 37f59a2..0000000 --- a/src/koi_net/logger.py +++ /dev/null @@ -1,176 +0,0 @@ -from datetime import datetime -import logging -from logging.handlers import RotatingFileHandler -import colorama -import structlog -import sys -# from sentry_sdk import logger as sentry_logger - - -def my_processor(_, __, event: dict): - # print(_, __, event) - event["path"] = event["module"] + "." + event["func_name"] - return event - -# def sentry_processor(_, method, event: dict): -# print(event) -# if method == "critical": -# sentry_logger.fatal( -# event["event"], -# attributes=event -# ) -# elif method == "info": -# sentry_logger.info( -# event["event"], -# attributes=event -# ) -# elif method == "debug": -# sentry_logger.debug( -# event["event"], -# attributes=event -# ) -# return event - -console_renderer = structlog.dev.ConsoleRenderer( - columns=[ - # Render the timestamp without the key name in yellow. 
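# Minimal usage sketch for the LogSystem component defined above: building it
# once configures stdlib logging plus structlog, so any module-level
# structlog logger then writes colorized console lines and JSON lines to
# log.ndjson (the file the LNAV config added in a later patch is meant to
# read). The node assembler normally constructs this; calling it directly is
# only for illustration, and note it creates log.ndjson in the working
# directory as a side effect.
import structlog
from koi_net.log_system import LogSystem

LogSystem()  # side effect: installs file + console handlers, configures structlog
log = structlog.stdlib.get_logger()
log.info("log system configured", component="sketch")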
- structlog.dev.Column( - "timestamp", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Style.DIM, - reset_style=colorama.Style.RESET_ALL, - value_repr=lambda t: datetime.fromisoformat(t).strftime("%Y-%m-%d %H:%M:%S"), - ), - ), - structlog.dev.Column( - "level", - structlog.dev.LogLevelColumnFormatter( - level_styles={ - level: colorama.Style.BRIGHT + color - for level, color in { - "critical": colorama.Fore.RED, - "exception": colorama.Fore.RED, - "error": colorama.Fore.RED, - "warn": colorama.Fore.YELLOW, - "warning": colorama.Fore.YELLOW, - "info": colorama.Fore.GREEN, - "debug": colorama.Fore.GREEN, - "notset": colorama.Back.RED, - }.items() - }, - reset_style=colorama.Style.RESET_ALL, - width=9 - ) - ), - # Render the event without the key name in bright magenta. - - # Default formatter for all keys not explicitly mentioned. The key is - # cyan, the value is green. - structlog.dev.Column( - "path", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Fore.MAGENTA, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - width=30 - ), - ), - # structlog.dev.Column( - # "func_name", - # structlog.dev.KeyValueColumnFormatter( - # key_style=None, - # value_style=colorama.Fore.MAGENTA, - # reset_style=colorama.Style.RESET_ALL, - # value_repr=str, - # prefix="(", - # postfix=")", - # width=15 - # ), - # ), - structlog.dev.Column( - "event", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Fore.WHITE, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - width=30 - ), - ), - structlog.dev.Column( - "", - structlog.dev.KeyValueColumnFormatter( - key_style=colorama.Fore.BLUE, - value_style=colorama.Fore.GREEN, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - ), - ) - ] -) - -structlog.configure( - processors=[ - # If log level is too low, abort pipeline and throw away log entry. - structlog.stdlib.filter_by_level, - # Add the name of the logger to event dict. - structlog.stdlib.add_logger_name, - # Add log level to event dict. - structlog.stdlib.add_log_level, - # Perform %-style formatting. - structlog.stdlib.PositionalArgumentsFormatter(), - # Add a timestamp in ISO 8601 format. - structlog.processors.TimeStamper(fmt="iso"), - # If the "stack_info" key in the event dict is true, remove it and - # render the current stack trace in the "stack" key. - structlog.processors.StackInfoRenderer(), - # If the "exc_info" key in the event dict is either true or a - # sys.exc_info() tuple, remove "exc_info" and render the exception - # with traceback into the "exception" key. - # structlog.processors.format_exc_info, - # If some value is in bytes, decode it to a Unicode str. - structlog.processors.UnicodeDecoder(), - # Add callsite parameters. - structlog.processors.CallsiteParameterAdder( - { - structlog.processors.CallsiteParameter.MODULE, - structlog.processors.CallsiteParameter.FUNC_NAME, - # structlog.processors.CallsiteParameter.LINENO, - } - ), - my_processor, - # Render the final event dict as JSON. - # sentry_processor, - console_renderer - # structlog.processors.JSONRenderer() - - ], - # `wrapper_class` is the bound logger that you get back from - # get_logger(). This one imitates the API of `logging.Logger`. - wrapper_class=structlog.stdlib.BoundLogger, - # `logger_factory` is used to create wrapped loggers that are used for - # OUTPUT. This one returns a `logging.Logger`. 
The final value (a JSON - # string) from the final processor (`JSONRenderer`) will be passed to - # the method of the same name as that you've called on the bound logger. - logger_factory=structlog.stdlib.LoggerFactory(), - # Effectively freeze configuration after creating the first bound - # logger. - cache_logger_on_first_use=True, -) - -file_handler = RotatingFileHandler( - filename="app.log", - maxBytes=10 * 1024 * 1024, - backupCount=5, - encoding="utf-8" -) - -logging.basicConfig( - format="%(message)s", - stream=sys.stdout, - level=logging.INFO, -) - -# log = structlog.stdlib.get_logger() \ No newline at end of file From 8174bad752c5193cac4bf6c17e77cd4e367d8ef4 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 7 Nov 2025 11:06:07 -0500 Subject: [PATCH 21/53] updated typing and error logging, added LNAV config file for koi-net ndjson logs --- koi-net.config.json | 76 +++++++++++++++++++++++++++++++ src/koi_net/assembler.py | 18 ++++---- src/koi_net/entrypoints/server.py | 3 +- 3 files changed, 88 insertions(+), 9 deletions(-) create mode 100644 koi-net.config.json diff --git a/koi-net.config.json b/koi-net.config.json new file mode 100644 index 0000000..4c99b8e --- /dev/null +++ b/koi-net.config.json @@ -0,0 +1,76 @@ +{ + "$schema": "https://lnav.org/schemas/format-v1.schema.json", + "koi_net_json_log": { + "title": "KOI-net node logs", + "description": "Detailed logs of node and network behavior", + "file-type": "json", + "body-field": "event", + "level-field": "", + "timestamp-field": "timestamp", + "hide-extra": true, + "line-format": [ + { + "field": "timestamp", + "auto-width": true, + "timestamp-format": "%Y-%m-%d %H:%M:%S" + }, + " ", + { + "field": "level", + "auto-width": true, + "default-value": "", + "prefix": "[", + "suffix": "]" + }, + " ", + { + "field": "event", + "min-width": 70, + "max-width": 70, + "overflow": "truncate" + }, + " - ", + { + "field": "logger", + "overflow": "abbrev", + "min-width": 30, + "max-width": 30, + "default-value": "", + "align": "right" + } + ], + "value": { + "level": { + "kind": "string" + }, + "logger": { + "kind": "string" + }, + "event": { + "kind": "string" + } + }, + "highlights": { + "debug": { + "pattern": "\\[(debug) *\\]", + "color": "Blue" + }, + "info": { + "pattern": "\\[(info) *\\]", + "color": "Green" + }, + "warning": { + "pattern": "\\[(warning) *\\]", + "color": "Yellow" + }, + "error": { + "pattern": "\\[(error|critical) *\\]", + "color": "Red" + }, + "timestamp": { + "pattern": "(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2})", + "color": "Grey" + } + } + } +} \ No newline at end of file diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index a7cce7f..c22b581 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,5 +1,5 @@ import inspect -from typing import Protocol +from typing import Any, Protocol from dataclasses import make_dataclass from pydantic import BaseModel @@ -16,7 +16,7 @@ def __new__(cls, name: str, bases: tuple, dct: dict[str]): cls = super().__new__(cls, name, bases, dct) if "_build_order" not in dct: - components = {} + components: dict[str, Any] = {} # adds components from base classes (including cls) for base in reversed(inspect.getmro(cls)[:-1]): for k, v in vars(base).items(): @@ -51,25 +51,27 @@ def _build(cls) -> NodeContainer: # print(comp_name) if not callable(comp): - print(f"Treating {comp_name} as a literal") + print(f"Treating {comp_name} as a constant") components[comp_name] = comp continue - if issubclass(comp, BaseModel): + if isinstance(comp, type) and 
issubclass(comp, BaseModel): print(f"Treating {comp_name} as a pydantic model") components[comp_name] = comp continue + # else: callable, and not a basemodel + sig = inspect.signature(comp) required_comps = [] for name, param in sig.parameters.items(): required_comps.append((name, param.annotation)) - if len(required_comps) == 0: - s = comp_name - else: - s = f"{comp_name} -> {', '.join([name for name, _type in required_comps])}" + # if len(required_comps) == 0: + # s = comp_name + # else: + # s = f"{comp_name} -> {', '.join([name for name, _type in required_comps])}" # print(s.replace("graph", "_graph"), end=";\n") diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index c299f9a..c10ed74 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -75,7 +75,8 @@ async def endpoint(req): def protocol_error_handler(self, request, exc: ProtocolError): """Catches `ProtocolError` and returns as `ErrorResponse`.""" - log.info(f"caught protocol error: {exc}") + # log.info(f"caught protocol error: {exc}") + log.error(exc) resp = ErrorResponse(error=exc.error_type) log.info(f"returning error response: {resp}") return JSONResponse( From f915e037a1ef24654a31f5072ab43c087fcceae9 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 7 Nov 2025 11:07:57 -0500 Subject: [PATCH 22/53] removed cli tools, moving to 'cli' branch, won't be a part of v1.2 --- src/koi_net/cli/__init__.py | 1 - src/koi_net/cli/commands.py | 99 ------------------------------------- src/koi_net/cli/models.py | 41 --------------- 3 files changed, 141 deletions(-) delete mode 100644 src/koi_net/cli/__init__.py delete mode 100644 src/koi_net/cli/commands.py delete mode 100644 src/koi_net/cli/models.py diff --git a/src/koi_net/cli/__init__.py b/src/koi_net/cli/__init__.py deleted file mode 100644 index c032984..0000000 --- a/src/koi_net/cli/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .commands import app \ No newline at end of file diff --git a/src/koi_net/cli/commands.py b/src/koi_net/cli/commands.py deleted file mode 100644 index dc73541..0000000 --- a/src/koi_net/cli/commands.py +++ /dev/null @@ -1,99 +0,0 @@ -import os -import typer -from typing import Callable -from rich.console import Console -from rich.table import Table - -from importlib.metadata import entry_points - -from koi_net.cli.models import KoiNetworkConfig -from koi_net.core import NodeInterface -import shutil - -app = typer.Typer() -console = Console() - -installed_nodes = entry_points(group='koi_net.node') - -net_config = KoiNetworkConfig.load_from_yaml() - -@app.command() -def list_node_types(): - table = Table(title="installed node types") - table.add_column("name", style="cyan") - table.add_column("module", style="magenta") - - for node in installed_nodes: - table.add_row(node.name, node.module) - console.print(table) - -@app.command() -def list_nodes(): - table = Table(title="created nodes") - table.add_column("name", style="cyan") - table.add_column("rid", style="magenta") - - for dir in os.listdir('.'): - if not os.path.isdir(dir): - continue - for file in os.listdir(dir): - file_path = os.path.join(dir, file) - if not (os.path.isfile(file_path) and file == "config.yaml"): - continue - - print(os.getcwd()) - os.chdir(dir) - print(os.getcwd()) - - node_type = net_config.nodes.get(dir) - - ep = list(installed_nodes.select(name=node_type))[0] - create_node: Callable[[], NodeInterface] = ep.load() - - node = create_node() - - print(ep) - print(dir) - print(node.identity.rid) - - table.add_row(dir, 
str(node.identity.rid)) - - os.chdir('..') - print(os.getcwd()) - - console.print(table) - -@app.command() -def create(type: str, name: str): - # if name not in installed_nodes: - # console.print(f"[bold red]Error:[/bold red] node type '{name}' doesn't exist") - # raise typer.Exit(code=1) - - eps = installed_nodes.select(name=type) - if eps: - ep = list(eps)[0] - - os.mkdir(name) - os.chdir(name) - - ep.load() - - os.chdir('..') - - net_config.nodes[name] = type - net_config.save_to_yaml() - -@app.command() -def remove(name: str): - shutil.rmtree(name) - net_config.nodes.pop(name, None) - net_config.save_to_yaml() - -@app.command() -def start(name: str): - os.chdir(name) - node_type = net_config.nodes.get(name) - ep = list(installed_nodes.select(name=node_type))[0] - create_node: Callable[[], NodeInterface] = ep.load() - - create_node().server.run() \ No newline at end of file diff --git a/src/koi_net/cli/models.py b/src/koi_net/cli/models.py deleted file mode 100644 index 2cdae5b..0000000 --- a/src/koi_net/cli/models.py +++ /dev/null @@ -1,41 +0,0 @@ -from pydantic import BaseModel, PrivateAttr -from ruamel.yaml import YAML - - -class KoiNetworkConfig(BaseModel): - nodes: dict[str, str] = {} - _file_path: str = PrivateAttr(default="koi-net-config.yaml") - - @classmethod - def load_from_yaml( - cls, - file_path: str = "koi-net-config.yaml", - ): - yaml = YAML() - - try: - with open(file_path, "r") as f: - file_content = f.read() - config_data = yaml.load(file_content) - config = cls.model_validate(config_data) - - except FileNotFoundError: - config = cls() - - config._file_path = file_path - config.save_to_yaml() - return config - - def save_to_yaml(self): - yaml = YAML() - - with open(self._file_path, "w") as f: - try: - config_data = self.model_dump(mode="json") - yaml.dump(config_data, f) - except Exception as e: - if self._file_content: - f.seek(0) - f.truncate() - f.write(self._file_content) - raise e \ No newline at end of file From e934ba2f2b59db52f25bd6e3420d91ee68b89bf0 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 7 Nov 2025 15:12:41 -0500 Subject: [PATCH 23/53] fixing coordinator catch up, no longer tries to query partial nodes or nodes that don't provide orn:koi-net.node state --- src/koi_net/lifecycle.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index f648b98..a963e3c 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -4,6 +4,8 @@ from rid_lib.ext import Bundle, Cache from rid_lib.types import KoiNetNode +from koi_net.protocol.node import NodeProfile, NodeType + from .handshaker import Handshaker from .network.request_handler import RequestHandler from .workers.kobj_worker import KnowledgeProcessingWorker @@ -90,9 +92,7 @@ def start(self): graph from nodes and edges in cache. Processes any state changes of node bundle. Initiates handshake with first contact if node doesn't have any neighbors. Catches up with coordinator state. 
- """ - log.info("Starting processor worker thread") - + """ self.kobj_worker.thread.start() self.event_worker.thread.start() self.graph.generate() @@ -107,12 +107,20 @@ def start(self): log.debug("Waiting for kobj queue to empty") self.kobj_queue.q.join() - coordinators = self.graph.get_neighbors(direction="in", allowed_type=KoiNetNode) + neighbors = self.graph.get_neighbors(direction="in", allowed_type=KoiNetNode) - if len(coordinators) > 0: - for coordinator in coordinators: + if len(neighbors) > 0: + for node in neighbors: + node_bundle = self.cache.read(node) + node_profile = node_bundle.validate_contents(NodeProfile) + + if KoiNetNode not in node_profile.provides.state: + continue + if node_profile.node_type != NodeType.FULL: + continue + payload = self.request_handler.fetch_manifests( - node=coordinator, + node=node, rid_types=[KoiNetNode] ) if type(payload) is ErrorResponse: @@ -121,7 +129,7 @@ def start(self): for manifest in payload.manifests: self.kobj_queue.push( manifest=manifest, - source=coordinator + source=node ) elif self.config.koi_net.first_contact.rid: From 09d39628c67d76058e79c4e8c78a3f3dd943ed88 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 7 Nov 2025 15:12:57 -0500 Subject: [PATCH 24/53] removed cli entrypoint --- pyproject.toml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 975029a..9495bbc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,8 +35,5 @@ docs = [ "sphinx-rtd-theme>=3.0.2", ] -[project.scripts] -koi = "koi_net.cli:app" - [project.urls] Homepage = "https://github.com/BlockScience/koi-net/" From e07e6aa740bf6a6299c239c9e4b4385cc0f824a3 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 13 Nov 2025 14:58:39 -0500 Subject: [PATCH 25/53] moved catch up function in lifecycle to a sync manager component, decomposed assembler build function into three steps, added a dependency graph visualizer, updated LNAV config --- koi-net.config.json | 29 ++++++----- src/koi_net/assembler.py | 97 ++++++++++++++++++++++++------------- src/koi_net/core.py | 2 + src/koi_net/lifecycle.py | 43 +++------------- src/koi_net/sync_manager.py | 63 ++++++++++++++++++++++++ 5 files changed, 151 insertions(+), 83 deletions(-) create mode 100644 src/koi_net/sync_manager.py diff --git a/koi-net.config.json b/koi-net.config.json index 4c99b8e..4a26945 100644 --- a/koi-net.config.json +++ b/koi-net.config.json @@ -18,35 +18,30 @@ { "field": "level", "auto-width": true, - "default-value": "", "prefix": "[", "suffix": "]" }, " ", { - "field": "event", - "min-width": 70, - "max-width": 70, - "overflow": "truncate" + "field": "module", + "min-width": 15, + "max-width": 15, + "overflow": "dot-dot", + "align": "right" }, " - ", { - "field": "logger", - "overflow": "abbrev", - "min-width": 30, - "max-width": 30, - "default-value": "", - "align": "right" + "field": "event" } ], "value": { "level": { "kind": "string" }, - "logger": { + "event": { "kind": "string" }, - "event": { + "module": { "kind": "string" } }, @@ -70,6 +65,14 @@ "timestamp": { "pattern": "(\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2})", "color": "Grey" + }, + "obj_name": { + "pattern": "<([a-zA-Z.]+)", + "color": "Purple" + }, + "quotations": { + "pattern": "(['\"][^']*['\"])", + "color": "Green" } } } diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index c22b581..21c0c72 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,4 +1,7 @@ +from enum import StrEnum import inspect +from matplotlib import pyplot as plt +import networkx as nx from 
typing import Any, Protocol from dataclasses import make_dataclass @@ -9,6 +12,9 @@ log = structlog.stdlib.get_logger() +class CompType(StrEnum): + FACTORY = "FACTORY" + OBJECT = "OBJECT" class BuildOrderer(type): def __new__(cls, name: str, bases: tuple, dct: dict[str]): @@ -37,52 +43,75 @@ class NodeContainer(Protocol): class NodeAssembler(metaclass=BuildOrderer): def __new__(self) -> NodeContainer: - return self._build() + return self._build_node() @classmethod - def _build(cls) -> NodeContainer: - components = {} + def _build_deps(cls) -> dict: + dep_graph: dict[str, tuple[CompType, list[str]]] = {} for comp_name in cls._build_order: - comp = getattr(cls, comp_name, None) - - if comp is None: - raise Exception(f"Couldn't find factory for component '{comp_name}'") - - # print(comp_name) + try: + comp = getattr(cls, comp_name) + except AttributeError: + raise Exception(f"Component '{comp_name}' not found in class definition") if not callable(comp): - print(f"Treating {comp_name} as a constant") - components[comp_name] = comp - continue + comp_type = CompType.OBJECT + dep_names = [] - if isinstance(comp, type) and issubclass(comp, BaseModel): - print(f"Treating {comp_name} as a pydantic model") - components[comp_name] = comp - continue + elif isinstance(comp, type) and issubclass(comp, BaseModel): + comp_type = CompType.OBJECT + dep_names = [] - # else: callable, and not a basemodel + else: + sig = inspect.signature(comp) + comp_type = CompType.FACTORY + dep_names = list(sig.parameters) + + dep_graph[comp_name] = (comp_type, dep_names) - sig = inspect.signature(comp) + print(f"{comp_name} ({comp_type}) -> {dep_names}") - required_comps = [] - for name, param in sig.parameters.items(): - required_comps.append((name, param.annotation)) + return dep_graph - # if len(required_comps) == 0: - # s = comp_name - # else: - # s = f"{comp_name} -> {', '.join([name for name, _type in required_comps])}" + @classmethod + def _visualize(cls): + dep_graph = cls._build_deps() + dg = nx.DiGraph() + + nx.complete_graph + + for node, (_, neighbors) in dep_graph.items(): + for n in neighbors: + dg.add_edge(node, n) + + nx.draw_spring(dg) + plt.show() + + @classmethod + def _build_comps(cls) -> dict[str, Any]: + components: dict[str, Any] = {} + + dep_graph = cls._build_deps() + for comp_name, (comp_type, dep_names) in dep_graph.items(): + comp = getattr(cls, comp_name, None) - # print(s.replace("graph", "_graph"), end=";\n") + if comp_type == CompType.OBJECT: + components[comp_name] = comp - dependencies = {} - for req_comp_name, req_comp_type in required_comps: - if req_comp_name not in components: - raise Exception(f"Couldn't find required component '{req_comp_name}'") - - dependencies[req_comp_name] = components[req_comp_name] - - components[comp_name] = comp(**dependencies) + elif comp_type == CompType.FACTORY: + # builds depedency dict for current component + dependencies = {} + for dep_name in dep_names: + if dep_name not in components: + raise Exception(f"Couldn't find required component '{dep_name}'") + dependencies[dep_name] = components[dep_name] + components[comp_name] = comp(**dependencies) + + return components + + @classmethod + def _build_node(cls) -> NodeContainer: + components = cls._build_comps() NodeContainer = make_dataclass( cls_name="NodeContainer", diff --git a/src/koi_net/core.py b/src/koi_net/core.py index c5521b0..ef535c3 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -7,6 +7,7 @@ from .processor.context import HandlerContext from .effector import Effector from 
.handshaker import Handshaker +from .sync_manager import SyncManager from .identity import NodeIdentity from .workers import KnowledgeProcessingWorker, EventProcessingWorker from .lifecycle import NodeLifecycle @@ -56,6 +57,7 @@ class BaseNode(NodeAssembler): handshaker = Handshaker error_handler = ErrorHandler request_handler = RequestHandler + sync_manager = SyncManager response_handler = ResponseHandler resolver = NetworkResolver effector = Effector diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index a963e3c..7d1df8a 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -1,17 +1,13 @@ import structlog from contextlib import contextmanager, asynccontextmanager -from rid_lib.ext import Bundle, Cache -from rid_lib.types import KoiNetNode - -from koi_net.protocol.node import NodeProfile, NodeType +from rid_lib.ext import Bundle +from .sync_manager import SyncManager from .handshaker import Handshaker -from .network.request_handler import RequestHandler from .workers.kobj_worker import KnowledgeProcessingWorker from .network.event_queue import EventQueue from .workers import EventProcessingWorker -from .protocol.api_models import ErrorResponse from .workers.base import STOP_WORKER from .config.core import NodeConfig from .processor.kobj_queue import KobjQueue @@ -31,9 +27,8 @@ class NodeLifecycle: kobj_worker: KnowledgeProcessingWorker event_queue: EventQueue event_worker: EventProcessingWorker - cache: Cache handshaker: Handshaker - request_handler: RequestHandler + sync_manager: SyncManager def __init__( self, @@ -44,9 +39,8 @@ def __init__( kobj_worker: KnowledgeProcessingWorker, event_queue: EventQueue, event_worker: EventProcessingWorker, - cache: Cache, handshaker: Handshaker, - request_handler: RequestHandler + sync_manager: SyncManager ): self.config = config self.identity = identity @@ -55,9 +49,8 @@ def __init__( self.kobj_worker = kobj_worker self.event_queue = event_queue self.event_worker = event_worker - self.cache = cache self.handshaker = handshaker - self.request_handler = request_handler + self.sync_manager = sync_manager @contextmanager def run(self): @@ -107,31 +100,9 @@ def start(self): log.debug("Waiting for kobj queue to empty") self.kobj_queue.q.join() - neighbors = self.graph.get_neighbors(direction="in", allowed_type=KoiNetNode) + if self.sync_manager.catch_up_with_coordinators(): + pass - if len(neighbors) > 0: - for node in neighbors: - node_bundle = self.cache.read(node) - node_profile = node_bundle.validate_contents(NodeProfile) - - if KoiNetNode not in node_profile.provides.state: - continue - if node_profile.node_type != NodeType.FULL: - continue - - payload = self.request_handler.fetch_manifests( - node=node, - rid_types=[KoiNetNode] - ) - if type(payload) is ErrorResponse: - continue - - for manifest in payload.manifests: - self.kobj_queue.push( - manifest=manifest, - source=node - ) - elif self.config.koi_net.first_contact.rid: log.debug(f"I don't have any edges with coordinators, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") diff --git a/src/koi_net/sync_manager.py b/src/koi_net/sync_manager.py new file mode 100644 index 0000000..74b6f62 --- /dev/null +++ b/src/koi_net/sync_manager.py @@ -0,0 +1,63 @@ +from rid_lib.ext import Cache +from rid_lib.types import KoiNetNode + +from koi_net.network.graph import NetworkGraph +from koi_net.network.request_handler import RequestHandler +from koi_net.processor.kobj_queue import KobjQueue +from .protocol.api_models import ErrorResponse +from 
.protocol.node import NodeProfile, NodeType + + +class SyncManager: + graph: NetworkGraph + cache: Cache + request_handler: RequestHandler + kobj_queue: KobjQueue + + def __init__( + self, + graph: NetworkGraph, + cache: Cache, + request_handler: RequestHandler, + kobj_queue: KobjQueue + ): + self.graph = graph + self.cache = cache + self.request_handler = request_handler + self.kobj_queue = kobj_queue + + def catch_up_with_coordinators(self) -> bool: + return self.catch_up_with( + nodes=self.graph.get_neighbors( + direction="in", + allowed_type=KoiNetNode + ), + rid_types=[KoiNetNode] + ) + + def catch_up_with(self, nodes, rid_types) -> bool: + # get all of the nodes such that, (node) -[orn:koi-net.node]-> (me) + # node providers that I am subscribed to + if not nodes: + return False + + for node in nodes: + node_bundle = self.cache.read(node) + node_profile = node_bundle.validate_contents(NodeProfile) + + if node_profile.node_type != NodeType.FULL: + continue + + payload = self.request_handler.fetch_manifests( + node, rid_types=rid_types) + + if type(payload) is ErrorResponse: + continue + + for manifest in payload.manifests: + self.kobj_queue.push( + manifest=manifest, + source=node + ) + + return True \ No newline at end of file From 592dd2a417106738492185eadb7b24e81a9ffda1 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 14 Nov 2025 14:25:11 -0500 Subject: [PATCH 26/53] cleaned up knowledge handler class, added doc strings to assembler --- src/koi_net/assembler.py | 66 ++++++++++++++++++++----------- src/koi_net/processor/handler.py | 16 +++++--- src/koi_net/processor/pipeline.py | 2 +- 3 files changed, 55 insertions(+), 29 deletions(-) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 21c0c72..aaa22c7 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,7 +1,5 @@ from enum import StrEnum import inspect -from matplotlib import pyplot as plt -import networkx as nx from typing import Any, Protocol from dataclasses import make_dataclass @@ -12,6 +10,7 @@ log = structlog.stdlib.get_logger() + class CompType(StrEnum): FACTORY = "FACTORY" OBJECT = "OBJECT" @@ -43,12 +42,23 @@ class NodeContainer(Protocol): class NodeAssembler(metaclass=BuildOrderer): def __new__(self) -> NodeContainer: - return self._build_node() + """Returns assembled node container.""" + return self._build() @classmethod - def _build_deps(cls) -> dict: - dep_graph: dict[str, tuple[CompType, list[str]]] = {} - for comp_name in cls._build_order: + def _build_deps( + cls, + build_order: list[str] + ) -> dict[str, tuple[CompType, list[str]]]: + """Returns dependency graph for components defined in `cls_build_order`. + + Graph representation is a dict where each key is a component name, + and the value is tuple containing the component type, and a list + of dependency component names. 
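# Editor's note: illustrative sketch, not part of the patch. It shows the shape of the
# dependency graph described in the docstring above: each key is a component name, each
# value is a tuple of the component type and the names of the components its factory
# requires. The component names used here are hypothetical examples.
from koi_net.assembler import CompType

example_dep_graph: dict[str, tuple[CompType, list[str]]] = {
    "config": (CompType.OBJECT, []),             # plain objects and pydantic models have no dependencies
    "cache": (CompType.FACTORY, ["config"]),     # a factory's parameter names identify its required components
    "graph": (CompType.FACTORY, ["identity", "cache"]),
}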
+ """ + + dep_graph = {} + for comp_name in build_order: try: comp = getattr(cls, comp_name) except AttributeError: @@ -72,26 +82,29 @@ def _build_deps(cls) -> dict: print(f"{comp_name} ({comp_type}) -> {dep_names}") return dep_graph - - @classmethod - def _visualize(cls): - dep_graph = cls._build_deps() - dg = nx.DiGraph() - nx.complete_graph + @classmethod + def _visualize(cls, dep_graph) -> str: + """Returns representation of dependency graph in Graphviz DOT language.""" + dep_graph = cls._build_deps(cls._build_order) + s = "digraph G {\n" for node, (_, neighbors) in dep_graph.items(): - for n in neighbors: - dg.add_edge(node, n) - - nx.draw_spring(dg) - plt.show() + sub_s = node + if neighbors: + sub_s += f"-> {', '.join(neighbors)}" + sub_s = sub_s.replace("graph", "graph_") + ";" + s += " " * 4 + sub_s + "\n" + s += "}" + return s @classmethod - def _build_comps(cls) -> dict[str, Any]: + def _build_comps( + cls, + dep_graph: dict[str, tuple[CompType, list[str]]] + ) -> dict[str, Any]: + """Returns assembled components from dependency graph.""" components: dict[str, Any] = {} - - dep_graph = cls._build_deps() for comp_name, (comp_type, dep_names) in dep_graph.items(): comp = getattr(cls, comp_name, None) @@ -110,9 +123,8 @@ def _build_comps(cls) -> dict[str, Any]: return components @classmethod - def _build_node(cls) -> NodeContainer: - components = cls._build_comps() - + def _build_node(cls, components: dict[str, Any]) -> NodeContainer: + """Returns node container from components.""" NodeContainer = make_dataclass( cls_name="NodeContainer", fields=[ @@ -124,3 +136,11 @@ def _build_node(cls) -> NodeContainer: ) return NodeContainer(**components) + + @classmethod + def _build(cls) -> NodeContainer: + """Returns node container after calling full build process.""" + dep_graph = cls._build_deps(cls._build_order) + comps = cls._build_comps(dep_graph) + node = cls._build_node(comps) + return node \ No newline at end of file diff --git a/src/koi_net/processor/handler.py b/src/koi_net/processor/handler.py index 9bdb07e..af62944 100644 --- a/src/koi_net/processor/handler.py +++ b/src/koi_net/processor/handler.py @@ -37,15 +37,22 @@ class KnowledgeHandler: func: Callable[[HandlerContext, KnowledgeObject], None | KnowledgeObject | StopChain] handler_type: HandlerType - rid_types: list[RIDType] | None - event_types: list[EventType | None] | None = None + rid_types: tuple[RIDType] + event_types: tuple[EventType | None] + + def __call__( + self, + ctx: HandlerContext, + kobj: KnowledgeObject + ) -> None | KnowledgeObject | StopChain: + return self.func(ctx, kobj) @classmethod def create( cls, handler_type: HandlerType, - rid_types: list[RIDType] | None = None, - event_types: list[EventType | None] | None = None + rid_types: tuple[RIDType] = (), + event_types: tuple[EventType | None] = () ): """Decorator wraps a function, returns a KnowledgeHandler. 
@@ -57,4 +64,3 @@ def decorator(func: Callable) -> KnowledgeHandler: handler = cls(func, handler_type, rid_types, event_types) return handler return decorator - diff --git a/src/koi_net/processor/pipeline.py b/src/koi_net/processor/pipeline.py index 576fa6d..20e3a8f 100644 --- a/src/koi_net/processor/pipeline.py +++ b/src/koi_net/processor/pipeline.py @@ -70,7 +70,7 @@ def call_handler_chain( log.debug(f"Calling {handler_type} handler '{handler.func.__name__}'") - resp = handler.func( + resp = handler( ctx=self.handler_context, kobj=kobj.model_copy() ) From a35f12c9e878600511a5f2cee3fe70785ddc87bd Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 14 Nov 2025 15:09:36 -0500 Subject: [PATCH 27/53] pass on config modules - improved logging and docs --- src/koi_net/config/core.py | 38 +++++++++++++++++++++++++----- src/koi_net/config/full_node.py | 20 +++++++++++++--- src/koi_net/config/loader.py | 16 +++++++------ src/koi_net/config/partial_node.py | 18 ++++++++++---- 4 files changed, 71 insertions(+), 21 deletions(-) diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index 248bdc1..eaf0fa9 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -3,17 +3,20 @@ from dotenv import load_dotenv from rid_lib import RIDType from rid_lib.types import KoiNetNode +import structlog from koi_net.protocol.secure import PrivateKey from ..protocol.node import NodeProfile +log = structlog.stdlib.get_logger() + class NodeContact(BaseModel): rid: KoiNetNode | None = None url: str | None = None class KoiNetConfig(BaseModel): - """Config for KOI-net.""" + """Config for KOI-net parameters.""" node_name: str node_rid: KoiNetNode | None = None @@ -53,19 +56,42 @@ def __getattribute__(self, name): return value class NodeConfig(BaseModel): + """Base node config class, intended to be extended.""" + koi_net: KoiNetConfig env: EnvConfig = EnvConfig() @model_validator(mode="after") def generate_rid_cascade(self): - if not self.koi_net.node_rid: + """Generates node RID if missing.""" + if self.koi_net.node_rid: + return self + + log.debug("Node RID not found in config, attempting to generate") + + try: + # attempts to read existing private key PEM file + with open(self.koi_net.private_key_pem_path, "r") as f: + priv_key_pem = f.read() + priv_key = PrivateKey.from_pem( + priv_key_pem, + password=self.env.priv_key_password) + log.debug("Used existing private key from PEM file") + + except FileNotFoundError: + # generates new private key if PEM not found priv_key = PrivateKey.generate() - pub_key = priv_key.public_key() - - self.koi_net.node_rid = pub_key.to_node_rid(self.koi_net.node_name) with open(self.koi_net.private_key_pem_path, "w") as f: f.write(priv_key.to_pem(self.env.priv_key_password)) - + log.debug("Generated new private key, no PEM file found") + + pub_key = priv_key.public_key() + self.koi_net.node_rid = pub_key.to_node_rid(self.koi_net.node_name) + log.debug(f"Node RID set to {self.koi_net.node_rid}") + + if self.koi_net.node_profile.public_key != pub_key.to_der(): self.koi_net.node_profile.public_key = pub_key.to_der() + log.warning("New private key overwrote old public key!") + return self \ No newline at end of file diff --git a/src/koi_net/config/full_node.py b/src/koi_net/config/full_node.py index 6fffdfc..7555473 100644 --- a/src/koi_net/config/full_node.py +++ b/src/koi_net/config/full_node.py @@ -1,16 +1,28 @@ from pydantic import BaseModel, model_validator -from koi_net.config.core import NodeConfig, KoiNetConfig as BaseKoiNetConfig -from ..protocol.node import 
NodeProfile as BaseNodeProfile, NodeType, NodeProvides +from .core import NodeConfig, KoiNetConfig as BaseKoiNetConfig +from ..protocol.node import ( + NodeProfile as BaseNodeProfile, + NodeType, + NodeProvides +) class NodeProfile(BaseNodeProfile): + """Node profile config class for full nodes.""" node_type: NodeType = NodeType.FULL class KoiNetConfig(BaseKoiNetConfig): + """KOI-net config class for full nodes.""" node_profile: NodeProfile class ServerConfig(BaseModel): - """Config for the node server (full node only).""" + """Server config for full nodes. + + The parameters in this class represent how a server should be hosted, + not accessed. For example, a node may host a server at + `http://127.0.0.1:8000/koi-net`, but serve through nginx at + `https://example.com/koi-net`. + """ host: str = "127.0.0.1" port: int = 8000 @@ -21,11 +33,13 @@ def url(self) -> str: return f"http://{self.host}:{self.port}{self.path or ''}" class FullNodeConfig(NodeConfig): + """Node config class for full nodes.""" koi_net: KoiNetConfig server: ServerConfig = ServerConfig() @model_validator(mode="after") def check_url(self): + """Generates base URL if missing from node profile.""" if not self.koi_net.node_profile.base_url: self.koi_net.node_profile.base_url = self.server.url return self diff --git a/src/koi_net/config/loader.py b/src/koi_net/config/loader.py index 92ceced..d653541 100644 --- a/src/koi_net/config/loader.py +++ b/src/koi_net/config/loader.py @@ -3,35 +3,36 @@ class ConfigLoader: + """Loads node config from a YAML file, and proxies access to it.""" _config: NodeConfig _file_path: str = "config.yaml" _file_content: str def __init__(self, config_cls: type[NodeConfig]): - self._config_cls = config_cls - self.load_from_yaml() + self.load_from_yaml(config_cls) def __getattr__(self, name): + """Proxies attribute lookups to internal config object.""" return getattr(self._config, name) - def load_from_yaml(self): - """Loads config state from YAML file.""" + def load_from_yaml(self, config_cls: type[NodeConfig]): + """Loads config from YAML file, or generates it if missing.""" yaml = YAML() try: with open(self._file_path, "r") as f: self._file_content = f.read() config_data = yaml.load(self._file_content) - self._config = self._config_cls.model_validate(config_data) + self._config = config_cls.model_validate(config_data) except FileNotFoundError: - self._config = self._config_cls() + self._config = config_cls() self.save_to_yaml() - def save_to_yaml(self): + """Saves config to YAML file.""" yaml = YAML() with open(self._file_path, "w") as f: @@ -39,6 +40,7 @@ def save_to_yaml(self): config_data = self._config.model_dump(mode="json") yaml.dump(config_data, f) except Exception as e: + # rewrites original content if YAML dump fails if self._file_content: f.seek(0) f.truncate() diff --git a/src/koi_net/config/partial_node.py b/src/koi_net/config/partial_node.py index 1554126..a9aa55b 100644 --- a/src/koi_net/config/partial_node.py +++ b/src/koi_net/config/partial_node.py @@ -1,18 +1,26 @@ from pydantic import BaseModel -from koi_net.config.core import NodeConfig, KoiNetConfig -from ..protocol.node import NodeProfile, NodeType, NodeProvides +from .core import NodeConfig, KoiNetConfig as BaseKoiNetConfig +from ..protocol.node import ( + NodeProfile as BaseNodeProfile, + NodeType, + NodeProvides +) -class NodeProfile(NodeProfile): +class NodeProfile(BaseNodeProfile): + """Node profile config class for partial nodes.""" base_url: str | None = None node_type: NodeType = NodeType.PARTIAL -class 
KoiNetConfig(KoiNetConfig): - node_profile: NodeProfile +class KoiNetConfig(BaseKoiNetConfig): + """KOI-net config class for partial nodes.""" + node_profile: NodeProfile class PollerConfig(BaseModel): + """Poller config for partial nodes.""" polling_interval: int = 5 class PartialNodeConfig(NodeConfig): + """Node config class for partial nodes.""" koi_net: KoiNetConfig poller: PollerConfig = PollerConfig() \ No newline at end of file From 15004caa19f295f019ac677e1b7ff974608ce335 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Fri, 14 Nov 2025 16:15:16 -0500 Subject: [PATCH 28/53] update documentation, logging, organization from entry point components --- src/koi_net/assembler.py | 2 +- src/koi_net/entrypoints/base.py | 9 +++-- src/koi_net/entrypoints/poller.py | 9 +++-- src/koi_net/entrypoints/server.py | 46 ++++++++++++------------- src/koi_net/network/graph.py | 4 +-- src/koi_net/network/resolver.py | 35 ++++++++++--------- src/koi_net/network/response_handler.py | 17 +++++---- src/koi_net/secure.py | 2 ++ 8 files changed, 64 insertions(+), 60 deletions(-) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index aaa22c7..1a54f80 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -79,7 +79,7 @@ def _build_deps( dep_graph[comp_name] = (comp_type, dep_names) - print(f"{comp_name} ({comp_type}) -> {dep_names}") + # print(f"{comp_name} ({comp_type}) -> {dep_names}") return dep_graph diff --git a/src/koi_net/entrypoints/base.py b/src/koi_net/entrypoints/base.py index 3e58fd2..ed6c504 100644 --- a/src/koi_net/entrypoints/base.py +++ b/src/koi_net/entrypoints/base.py @@ -1,5 +1,8 @@ -from koi_net.config.core import NodeConfig +from abc import ABC, abstractmethod -class EntryPoint: - def run(self): ... \ No newline at end of file +class EntryPoint(ABC): + """Abstract class for entry point components.""" + @abstractmethod + def run(self): + ... 
\ No newline at end of file diff --git a/src/koi_net/entrypoints/poller.py b/src/koi_net/entrypoints/poller.py index 35c6d2d..4d8d389 100644 --- a/src/koi_net/entrypoints/poller.py +++ b/src/koi_net/entrypoints/poller.py @@ -12,7 +12,7 @@ class NodePoller(EntryPoint): - """Manages polling based event loop for partial nodes.""" + """Entry point for partial nodes, manages polling event loop.""" kobj_queue: KobjQueue lifecycle: NodeLifecycle resolver: NetworkResolver @@ -31,10 +31,9 @@ def __init__( self.config = config def poll(self): - """Polls neighbors and processes returned events.""" - neighbors = self.resolver.poll_neighbors() - for node_rid in neighbors: - for event in neighbors[node_rid]: + """Polls neighbor nodes and processes returned events.""" + for node_rid, events in self.resolver.poll_neighbors().items(): + for event in events: self.kobj_queue.push(event=event, source=node_rid) def run(self): diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index c10ed74..d57264c 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -16,7 +16,7 @@ class NodeServer(EntryPoint): - """Manages FastAPI server and event handling for full nodes.""" + """Entry point for full nodes, manages FastAPI server.""" config: FullNodeConfig lifecycle: NodeLifecycle response_handler: ResponseHandler @@ -33,24 +33,9 @@ def __init__( self.lifecycle = lifecycle self.response_handler = response_handler self._build_app() - - def _build_app(self): - """Builds FastAPI app and adds endpoints.""" - @asynccontextmanager - async def lifespan(*args, **kwargs): - async with self.lifecycle.async_run(): - yield - - self.app = FastAPI( - lifespan=lifespan, - title="KOI-net Protocol API", - version="1.0.0" - ) - - self.app.add_exception_handler(ProtocolError, self.protocol_error_handler) - - self.router = APIRouter(prefix="/koi-net") - + + def _build_endpoints(self, router: APIRouter): + """Builds endpoints for API router.""" for path, models in API_MODEL_MAP.items(): def create_endpoint(path: str): async def endpoint(req): @@ -64,21 +49,36 @@ async def endpoint(req): return endpoint - self.router.add_api_route( + router.add_api_route( path=path, endpoint=create_endpoint(path), methods=["POST"], response_model_exclude_none=True ) + + def _build_app(self): + """Builds FastAPI app.""" + @asynccontextmanager + async def lifespan(*args, **kwargs): + async with self.lifecycle.async_run(): + yield + self.app = FastAPI( + lifespan=lifespan, + title="KOI-net Protocol API", + version="1.1.0" + ) + + self.app.add_exception_handler(ProtocolError, self.protocol_error_handler) + self.router = APIRouter(prefix="/koi-net") + self._build_endpoints(self.router) self.app.include_router(self.router) def protocol_error_handler(self, request, exc: ProtocolError): - """Catches `ProtocolError` and returns as `ErrorResponse`.""" - # log.info(f"caught protocol error: {exc}") + """Catches `ProtocolError` and returns an `ErrorResponse` payload.""" log.error(exc) resp = ErrorResponse(error=exc.error_type) - log.info(f"returning error response: {resp}") + log.info(f"Returning error response: {resp}") return JSONResponse( status_code=400, content=resp.model_dump(mode="json") diff --git a/src/koi_net/network/graph.py b/src/koi_net/network/graph.py index 0dac50d..7f76a0e 100644 --- a/src/koi_net/network/graph.py +++ b/src/koi_net/network/graph.py @@ -112,6 +112,6 @@ def get_neighbors( neighbors.add(edge_profile.source) elif edge_profile.source == self.identity.rid: 
neighbors.add(edge_profile.target) - + return list(neighbors) - + diff --git a/src/koi_net/network/resolver.py b/src/koi_net/network/resolver.py index ef61d7a..065d1e6 100644 --- a/src/koi_net/network/resolver.py +++ b/src/koi_net/network/resolver.py @@ -104,27 +104,28 @@ def fetch_remote_manifest(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | N def poll_neighbors(self) -> dict[KoiNetNode, list[Event]]: """Polls all neighbor nodes and returns compiled list of events. - Neighbor nodes also include the first contact, regardless of - whether the first contact profile is known to this node. - """ + Neighbor nodes include any node this node shares an edge with, + or the first contact, if no neighbors are found. - graph_neighbors = self.graph.get_neighbors() - neighbors = [] + NOTE: This function does not poll nodes that don't share edges + with this node. Events sent by non neighboring nodes will not + be polled. + """ - if graph_neighbors: - for node_rid in graph_neighbors: - node_bundle = self.cache.read(node_rid) - if not node_bundle: - continue - node_profile = node_bundle.validate_contents(NodeProfile) - if node_profile.node_type != NodeType.FULL: - continue - neighbors.append(node_rid) + neighbors: list[KoiNetNode] = [] + for node_rid in self.graph.get_neighbors(): + node_bundle = self.cache.read(node_rid) + if not node_bundle: + continue + node_profile = node_bundle.validate_contents(NodeProfile) + if node_profile.node_type != NodeType.FULL: + continue + neighbors.append(node_rid) - elif self.config.koi_net.first_contact.rid: + if not neighbors and self.config.koi_net.first_contact.rid: neighbors.append(self.config.koi_net.first_contact.rid) - event_dict = dict() + event_dict: dict[KoiNetNode, list[Event]] = {} for node_rid in neighbors: try: payload = self.request_handler.poll_events( @@ -134,7 +135,7 @@ def poll_neighbors(self) -> dict[KoiNetNode, list[Event]]: if type(payload) == ErrorResponse: continue - + if payload.events: log.debug(f"Received {len(payload.events)} events from {node_rid!r}") diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index ff0656c..38f43ec 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -77,8 +77,8 @@ def poll_events_handler( req: PollEvents, source: KoiNetNode ) -> EventsPayload: - log.info(f"Request to poll events") events = self.poll_event_buf.flush(source, limit=req.limit) + log.info(f"Request to poll events, returning {len(events)} event(s)") return EventsPayload(events=events) def fetch_rids_handler( @@ -87,18 +87,16 @@ def fetch_rids_handler( source: KoiNetNode ) -> RidsPayload: """Returns response to fetch RIDs request.""" - log.info(f"Request to fetch rids, allowed types {req.rid_types}") rids = self.cache.list_rids(req.rid_types) - + log.info(f"Request to fetch rids, allowed types {req.rid_types}, returning {len(rids)} RID(s)") return RidsPayload(rids=rids) - def fetch_manifests_handler(self, + def fetch_manifests_handler( + self, req: FetchManifests, source: KoiNetNode ) -> ManifestsPayload: - """Returns response to fetch manifests request.""" - log.info(f"Request to fetch manifests, allowed types {req.rid_types}, rids {req.rids}") - + """Returns response to fetch manifests request.""" manifests: list[Manifest] = [] not_found: list[RID] = [] @@ -109,6 +107,7 @@ def fetch_manifests_handler(self, else: not_found.append(rid) + log.info(f"Request to fetch manifests, allowed types {req.rid_types}, rids {req.rids}, returning {len(manifests)} 
manifest(s)") return ManifestsPayload(manifests=manifests, not_found=not_found) def fetch_bundles_handler( @@ -117,7 +116,6 @@ def fetch_bundles_handler( source: KoiNetNode ) -> BundlesPayload: """Returns response to fetch bundles request.""" - log.info(f"Request to fetch bundles, requested rids {req.rids}") bundles: list[Bundle] = [] not_found: list[RID] = [] @@ -128,5 +126,6 @@ def fetch_bundles_handler( bundles.append(bundle) else: not_found.append(rid) - + + log.info(f"Request to fetch bundles, requested rids {req.rids}, returning {len(bundles)} bundle(s)") return BundlesPayload(bundles=bundles, not_found=not_found) \ No newline at end of file diff --git a/src/koi_net/secure.py b/src/koi_net/secure.py index da89327..8c73198 100644 --- a/src/koi_net/secure.py +++ b/src/koi_net/secure.py @@ -43,6 +43,8 @@ def __init__( def _load_priv_key(self) -> PrivateKey: """Loads private key from PEM file path in config.""" + + # TODO: handle missing private key with open(self.config.koi_net.private_key_pem_path, "r") as f: priv_key_pem = f.read() From 8c8b40aeb43aa7d64385ab2551284c1e20cbc11e Mon Sep 17 00:00:00 2001 From: lukvmil Date: Sun, 16 Nov 2025 12:53:49 -0500 Subject: [PATCH 29/53] cleaning up network methods, documentation and small code improvements. added a reset for the node network timeout --- src/koi_net/network/error_handler.py | 23 ++++++--- src/koi_net/network/event_queue.py | 6 +-- src/koi_net/network/graph.py | 17 ++++--- src/koi_net/network/poll_event_buffer.py | 13 +++-- src/koi_net/network/request_handler.py | 63 +++++++++++------------- src/koi_net/network/resolver.py | 25 +++++++--- src/koi_net/network/response_handler.py | 2 - 7 files changed, 85 insertions(+), 64 deletions(-) diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index 0ca558c..bdc2c33 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -9,7 +9,7 @@ class ErrorHandler: - """Handles network errors that may occur during requests.""" + """Handles network and protocol errors that may occur during requests.""" timeout_counter: dict[KoiNetNode, int] kobj_queue: KobjQueue @@ -21,9 +21,18 @@ def __init__( self.kobj_queue = kobj_queue self.handshaker = handshaker self.timeout_counter = {} - + + def reset_timeout_counter(self, node: KoiNetNode): + """Reset's a node timeout counter to zero.""" + self.timeout_counter[node] = 0 + def handle_connection_error(self, node: KoiNetNode): - """Drops nodes after timing out three times.""" + """Drops nodes after timing out three times. + + TODO: Need a better heuristic for network state. For example, if + a node lost connection to the internet, it would quickly forget + all other nodes. + """ self.timeout_counter.setdefault(node, 0) self.timeout_counter[node] += 1 @@ -32,15 +41,17 @@ def handle_connection_error(self, node: KoiNetNode): if self.timeout_counter[node] > 3: log.debug(f"Exceeded time out limit, forgetting node") self.kobj_queue.push(rid=node, event_type=EventType.FORGET) - # do something - def handle_protocol_error( self, error_type: ErrorType, node: KoiNetNode ): - """Attempts handshake when this node is unknown to target.""" + """Handles protocol errors that occur during network requests. + + Attempts handshake when this node is unknown to target. 
+ """ + log.info(f"Handling protocol error {error_type} for node {node!r}") match error_type: case ErrorType.UnknownNode: diff --git a/src/koi_net/network/event_queue.py b/src/koi_net/network/event_queue.py index 53e9feb..cd12a02 100644 --- a/src/koi_net/network/event_queue.py +++ b/src/koi_net/network/event_queue.py @@ -14,7 +14,7 @@ class QueuedEvent(BaseModel): target: KoiNetNode class EventQueue: - """Handles out going network event queues.""" + """Queue for outgoing network events.""" q: Queue[QueuedEvent] def __init__(self): @@ -23,8 +23,8 @@ def __init__(self): def push(self, event: Event, target: KoiNetNode): """Pushes event to queue of specified node. - Event will be sent to webhook or poll queue depending on the - node type and edge type of the specified node. + Event will be sent to webhook or poll queue by the event worker + depending on the node type and edge type of the specified node. """ self.q.put(QueuedEvent(target=target, event=event)) diff --git a/src/koi_net/network/graph.py b/src/koi_net/network/graph.py index 7f76a0e..d4e5325 100644 --- a/src/koi_net/network/graph.py +++ b/src/koi_net/network/graph.py @@ -41,7 +41,11 @@ def generate(self): log.debug(f"Added edge {rid!r} ({edge_profile.source} -> {edge_profile.target})") log.debug("Done") - def get_edge(self, source: KoiNetNode, target: KoiNetNode,) -> KoiNetEdge | None: + def get_edge( + self, + source: KoiNetNode, + target: KoiNetNode + ) -> KoiNetEdge | None: """Returns edge RID given the RIDs of a source and target node.""" if (source, target) in self.dg.edges: edge_data = self.dg.get_edge_data(source, target) @@ -59,16 +63,16 @@ def get_edges( All edges returned by default, specify `direction` to restrict to incoming or outgoing edges only. """ - + edges = [] - if direction != "in" and self.dg.out_edges: + if (direction is None or direction == "out") and self.dg.out_edges: out_edges = self.dg.out_edges(self.identity.rid) edges.extend(out_edges) - - if direction != "out" and self.dg.in_edges: + + if (direction is None or direction == "in") and self.dg.in_edges: in_edges = self.dg.in_edges(self.identity.rid) edges.extend(in_edges) - + edge_rids = [] for edge in edges: edge_data = self.dg.get_edge_data(*edge) @@ -114,4 +118,3 @@ def get_neighbors( neighbors.add(edge_profile.target) return list(neighbors) - diff --git a/src/koi_net/network/poll_event_buffer.py b/src/koi_net/network/poll_event_buffer.py index 310212e..6150c37 100644 --- a/src/koi_net/network/poll_event_buffer.py +++ b/src/koi_net/network/poll_event_buffer.py @@ -1,26 +1,29 @@ from rid_lib.types import KoiNetNode -from koi_net.protocol.event import Event +from ..protocol.event import Event class PollEventBuffer: + """Stores outgoing events intended for polling nodes.""" buffers: dict[KoiNetNode, list[Event]] def __init__(self): self.buffers = dict() def push(self, node: KoiNetNode, event: Event): + """Pushes event to specified node.""" event_buf = self.buffers.setdefault(node, []) event_buf.append(event) - def flush(self, node: KoiNetNode, limit: int = 0): + def flush(self, node: KoiNetNode, limit: int = 0) -> list[Event]: + """Flushes all events for a given node, with an optional limit.""" event_buf = self.buffers.get(node, []) if limit and len(event_buf) > limit: - to_return = event_buf[:limit] + flushed_events = event_buf[:limit] self.buffers[node] = event_buf[limit:] else: - to_return = event_buf.copy() + flushed_events = event_buf.copy() self.buffers[node] = [] - return to_return \ No newline at end of file + return flushed_events \ No newline at 
end of file diff --git a/src/koi_net/network/request_handler.py b/src/koi_net/network/request_handler.py index 6f4a834..a999cde 100644 --- a/src/koi_net/network/request_handler.py +++ b/src/koi_net/network/request_handler.py @@ -4,8 +4,6 @@ from rid_lib.ext import Cache from rid_lib.types.koi_net_node import KoiNetNode -from koi_net.protocol.model_map import API_MODEL_MAP - from ..identity import NodeIdentity from ..protocol.api_models import ( RidsPayload, @@ -20,7 +18,6 @@ ResponseModels, ErrorResponse ) -from ..protocol.envelope import SignedEnvelope from ..protocol.consts import ( BROADCAST_EVENTS_PATH, POLL_EVENTS_PATH, @@ -29,26 +26,30 @@ FETCH_BUNDLES_PATH ) from ..protocol.node import NodeProfile, NodeType +from ..protocol.model_map import API_MODEL_MAP from ..secure import Secure from .error_handler import ErrorHandler log = structlog.stdlib.get_logger() +class KoiNetRequestError(Exception): + pass + # Custom error types for request handling -class SelfRequestError(Exception): +class SelfRequestError(KoiNetRequestError): """Raised when a node tries to request itself.""" pass -class PartialNodeQueryError(Exception): +class PartialNodeQueryError(KoiNetRequestError): """Raised when attempting to query a partial node.""" pass -class NodeNotFoundError(Exception): +class NodeNotFoundError(KoiNetRequestError): """Raised when a node URL cannot be found.""" pass -class UnknownPathError(Exception): +class UnknownPathError(KoiNetRequestError): """Raised when an unknown path is requested.""" pass @@ -72,31 +73,21 @@ def __init__( self.secure = secure self.error_handler = error_handler - def get_url(self, node_rid: KoiNetNode) -> str: + def get_base_url(self, node_rid: KoiNetNode) -> str: """Retrieves URL of a node from its RID.""" - log.debug(f"Getting URL for {node_rid!r}") - node_url = None - - if node_rid == self.identity.rid: - raise SelfRequestError("Don't talk to yourself") - node_bundle = self.cache.read(node_rid) - if node_bundle: node_profile = node_bundle.validate_contents(NodeProfile) - log.debug(f"Found node profile: {node_profile}") if node_profile.node_type != NodeType.FULL: - raise PartialNodeQueryError("Can't query partial node") + raise PartialNodeQueryError("Partial nodes don't have URLs") node_url = node_profile.base_url - else: - if node_rid == self.identity.config.koi_net.first_contact.rid: - log.debug("Found URL of first contact") - node_url = self.identity.config.koi_net.first_contact.url + elif node_rid == self.identity.config.koi_net.first_contact.rid: + node_url = self.identity.config.koi_net.first_contact.url - if not node_url: - raise NodeNotFoundError("Node not found") + else: + raise NodeNotFoundError(f"URL not found for {node_rid!r}") log.debug(f"Resolved {node_rid!r} to {node_url}") return node_url @@ -108,7 +99,10 @@ def make_request( request: RequestModels, ) -> ResponseModels | None: """Makes a request to a node.""" - url = self.get_url(node) + path + if node == self.identity.rid: + raise SelfRequestError("Don't talk to yourself") + + url = self.get_base_url(node) + path log.info(f"Making request to {url}") signed_envelope = self.secure.create_envelope( @@ -116,12 +110,13 @@ def make_request( target=node ) + data = signed_envelope.model_dump_json(exclude_none=True) + try: - result = httpx.post( - url, - data=signed_envelope.model_dump_json(exclude_none=True) - ) - except httpx.ConnectError as err: + result = httpx.post(url, data=data) + self.error_handler.reset_timeout_counter(node) + + except httpx.RequestError as err: log.debug("Failed to connect") 
self.error_handler.handle_connection_error(node) raise err @@ -132,7 +127,7 @@ def make_request( return resp resp_env_model = API_MODEL_MAP[path].response_envelope - if resp_env_model is None: + if not resp_env_model: return resp_envelope = resp_env_model.model_validate_json(result.text) @@ -159,7 +154,7 @@ def poll_events( node: RID, req: PollEvents | None = None, **kwargs - ) -> EventsPayload: + ) -> EventsPayload | ErrorResponse: """Polls events from a node. Pass `PollEvents` object as `req` or fields as kwargs. @@ -175,7 +170,7 @@ def fetch_rids( node: RID, req: FetchRids | None = None, **kwargs - ) -> RidsPayload: + ) -> RidsPayload | ErrorResponse: """Fetches RIDs from a node. Pass `FetchRids` object as `req` or fields as kwargs. @@ -191,7 +186,7 @@ def fetch_manifests( node: RID, req: FetchManifests | None = None, **kwargs - ) -> ManifestsPayload: + ) -> ManifestsPayload | ErrorResponse: """Fetches manifests from a node. Pass `FetchManifests` object as `req` or fields as kwargs. @@ -207,7 +202,7 @@ def fetch_bundles( node: RID, req: FetchBundles | None = None, **kwargs - ) -> BundlesPayload: + ) -> BundlesPayload | ErrorResponse: """Fetches bundles from a node. Pass `FetchBundles` object as `req` or fields as kwargs. diff --git a/src/koi_net/network/resolver.py b/src/koi_net/network/resolver.py index 065d1e6..df5f94f 100644 --- a/src/koi_net/network/resolver.py +++ b/src/koi_net/network/resolver.py @@ -52,15 +52,21 @@ def get_state_providers(self, rid_type: RIDType) -> list[KoiNetNode]: continue node_bundle = self.cache.read(node_rid) - node_profile = node_bundle.validate_contents(NodeProfile) - if (node_profile.node_type == NodeType.FULL) and (rid_type in node_profile.provides.state): - log.debug(f"Found provider {node_rid!r}") - provider_nodes.append(node_rid) + if node_profile.node_type != NodeType.FULL: + continue + + if rid_type not in node_profile.provides.state: + continue + + provider_nodes.append(node_rid) - if not provider_nodes: + if provider_nodes: + log.debug(f"Found provider(s) {provider_nodes}") + else: log.debug("Failed to find providers") + return provider_nodes def fetch_remote_bundle(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | None]: @@ -72,6 +78,9 @@ def fetch_remote_bundle(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | Non payload = self.request_handler.fetch_bundles( node=node_rid, rids=[rid]) + if type(payload) == ErrorResponse: + continue + if payload.bundles: remote_bundle = payload.bundles[0] log.debug(f"Got bundle from {node_rid!r}") @@ -91,6 +100,9 @@ def fetch_remote_manifest(self, rid: RID) -> tuple[Bundle | None, KoiNetNode | N payload = self.request_handler.fetch_manifests( node=node_rid, rids=[rid]) + if type(payload) == ErrorResponse: + continue + if payload.manifests: remote_manifest = payload.manifests[0] log.debug(f"Got bundle from {node_rid!r}") @@ -138,10 +150,9 @@ def poll_neighbors(self) -> dict[KoiNetNode, list[Event]]: if payload.events: log.debug(f"Received {len(payload.events)} events from {node_rid!r}") - event_dict[node_rid] = payload.events - except httpx.ConnectError: + except httpx.RequestError: log.debug(f"Failed to reach node {node_rid!r}") continue diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index 38f43ec..1abbb12 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -8,11 +8,9 @@ from koi_net.processor.kobj_queue import KobjQueue from koi_net.protocol.consts import BROADCAST_EVENTS_PATH, FETCH_BUNDLES_PATH, 
FETCH_MANIFESTS_PATH, FETCH_RIDS_PATH, POLL_EVENTS_PATH from koi_net.protocol.envelope import SignedEnvelope -from koi_net.protocol.model_map import API_MODEL_MAP from koi_net.secure import Secure from ..protocol.api_models import ( - ApiModels, EventsPayload, PollEvents, RidsPayload, From e9423741d400916f883e4e411035ae3943d51873 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Sun, 16 Nov 2025 15:57:29 -0500 Subject: [PATCH 30/53] updated docstrings and cleaned up code in processor modules --- src/koi_net/processor/context.py | 2 +- src/koi_net/processor/handler.py | 11 +- src/koi_net/processor/knowledge_handlers.py | 122 ++++++++++---------- src/koi_net/processor/knowledge_object.py | 24 ++-- src/koi_net/processor/kobj_queue.py | 10 +- src/koi_net/processor/pipeline.py | 55 ++++++--- 6 files changed, 123 insertions(+), 101 deletions(-) diff --git a/src/koi_net/processor/context.py b/src/koi_net/processor/context.py index bfa5f1d..cdd3870 100644 --- a/src/koi_net/processor/context.py +++ b/src/koi_net/processor/context.py @@ -11,7 +11,7 @@ class HandlerContext: - """Provides knowledge handlers access to other subsystems.""" + """Context object provides knowledge handlers access to other components.""" identity: NodeIdentity config: NodeConfig diff --git a/src/koi_net/processor/handler.py b/src/koi_net/processor/handler.py index af62944..b7704a5 100644 --- a/src/koi_net/processor/handler.py +++ b/src/koi_net/processor/handler.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from enum import StrEnum from typing import Callable -from rid_lib import RIDType +from rid_lib.core import RIDType from ..protocol.event import EventType from .knowledge_object import KnowledgeObject @@ -9,7 +9,7 @@ class StopChain: - """Class for a sentinel value by knowledge handlers.""" + """Class for STOP_CHAIN sentinel returned by knowledge handlers.""" pass STOP_CHAIN = StopChain() @@ -54,12 +54,7 @@ def create( rid_types: tuple[RIDType] = (), event_types: tuple[EventType | None] = () ): - """Decorator wraps a function, returns a KnowledgeHandler. - - The function symbol will redefined as a `KnowledgeHandler`, - which can be passed into the `ProcessorInterface` constructor. - This is used to register default handlers. - """ + """Decorator wraps a function, returns a KnowledgeHandler.""" def decorator(func: Callable) -> KnowledgeHandler: handler = cls(func, handler_type, rid_types, event_types) return handler diff --git a/src/koi_net/processor/knowledge_handlers.py b/src/koi_net/processor/knowledge_handlers.py index f90314e..50662af 100644 --- a/src/koi_net/processor/knowledge_handlers.py +++ b/src/koi_net/processor/knowledge_handlers.py @@ -9,7 +9,7 @@ from .knowledge_object import KnowledgeObject from .context import HandlerContext from ..protocol.event import Event, EventType -from ..protocol.edge import EdgeProfile, EdgeStatus, EdgeType +from ..protocol.edge import EdgeProfile, EdgeStatus, EdgeType, generate_edge_bundle from ..protocol.node import NodeProfile log = structlog.stdlib.get_logger() @@ -21,27 +21,30 @@ def basic_rid_handler(ctx: HandlerContext, kobj: KnowledgeObject): """Default RID handler. - Blocks external events about this node. Allows `FORGET` events if - RID is known to this node. + Blocks external events about this node. Sets normalized event type + for `FORGET` events. 
""" - if (kobj.rid == ctx.identity.rid and kobj.source): - log.debug("Don't let anyone else tell me who I am!") + + if (kobj.rid == ctx.identity.rid and kobj.source is not None): + log.debug("Externally sourced events about this node not allowed.") return STOP_CHAIN if kobj.event_type == EventType.FORGET: kobj.normalized_event_type = EventType.FORGET return kobj + # Manifest handlers @KnowledgeHandler.create(HandlerType.Manifest) def basic_manifest_handler(ctx: HandlerContext, kobj: KnowledgeObject): - """Decider based on incoming manifest and cache state. + """Normalized event decider based on manifest and cache state. - Blocks manifests which have the same hash, or aren't newer than the - cached version. Sets the normalized event type to `NEW` or `UPDATE` - depending on whether the RID was previously known. + Stops processing for manifests which have the same hash, or aren't + newer than the cached version. Sets the normalized event type to + `NEW` or `UPDATE` depending on whether the RID was previously known. """ + prev_bundle = ctx.cache.read(kobj.rid) if prev_bundle: @@ -66,13 +69,13 @@ def basic_manifest_handler(ctx: HandlerContext, kobj: KnowledgeObject): @KnowledgeHandler.create( handler_type=HandlerType.Bundle, - rid_types=[KoiNetNode], - event_types=[EventType.NEW, EventType.UPDATE]) + rid_types=(KoiNetNode,), + event_types=(EventType.NEW, EventType.UPDATE)) def secure_profile_handler(ctx: HandlerContext, kobj: KnowledgeObject): """Maintains security of cached node profiles. Blocks bundles with a mismatching public keys in their node profile - and RID from continuing through the pipeline. + and RID from being written to cache. """ node_profile = kobj.bundle.validate_contents(NodeProfile) @@ -84,24 +87,25 @@ def secure_profile_handler(ctx: HandlerContext, kobj: KnowledgeObject): @KnowledgeHandler.create( handler_type=HandlerType.Bundle, - rid_types=[KoiNetEdge], - event_types=[EventType.NEW, EventType.UPDATE]) + rid_types=(KoiNetEdge,), + event_types=(EventType.NEW, EventType.UPDATE)) def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): - """Handles basic edge negotiation process. + """Handles edge negotiation process. Automatically approves proposed edges if they request RID types this - node can provide (or KOI nodes/edges). Validates the edge type is - allowed for the node type (partial nodes cannot use webhooks). If + node can provide (or KOI node, edge RIDs). Validates the edge type + is allowed for the node type (partial nodes cannot use webhooks). If edge is invalid, a `FORGET` event is sent to the other node. 
""" - # only respond when source is another node - if kobj.source is None: return + # only handle incoming events (ignore internal edge knowledge objects) + if kobj.source is None: + return edge_profile = kobj.bundle.validate_contents(EdgeProfile) - # indicates peer subscribing to me - if edge_profile.source == ctx.identity.rid: + # indicates peer subscribing to this node + if edge_profile.source == ctx.identity.rid: if edge_profile.status != EdgeStatus.PROPOSED: return @@ -122,7 +126,6 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): KoiNetNode, KoiNetEdge ) - abort = False if (edge_profile.edge_type == EdgeType.WEBHOOK and peer_profile.node_type == NodeType.PARTIAL): @@ -137,9 +140,8 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): event = Event.from_rid(EventType.FORGET, kobj.rid) ctx.event_queue.push(event, peer_rid, flush=True) return STOP_CHAIN - + else: - # approve edge profile log.debug("Approving proposed edge") edge_profile.status = EdgeStatus.APPROVED updated_bundle = Bundle.generate(kobj.rid, edge_profile.model_dump()) @@ -154,7 +156,7 @@ def edge_negotiation_handler(ctx: HandlerContext, kobj: KnowledgeObject): # Network handlers -@KnowledgeHandler.create(HandlerType.Network, rid_types=[KoiNetNode]) +@KnowledgeHandler.create(HandlerType.Network, rid_types=(KoiNetNode,)) def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): """Makes contact with providers of RID types of interest. @@ -177,15 +179,12 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): if not available_rid_types: return - log.info("Identified a coordinator!") - log.info("Proposing new edge") - - # already have an edge established edge_rid = ctx.graph.get_edge( source=kobj.rid, target=ctx.identity.rid, ) + # already have an edge established if edge_rid: prev_edge_bundle = ctx.cache.read(edge_rid) edge_profile = prev_edge_bundle.validate_contents(EdgeProfile) @@ -194,32 +193,30 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): # no change in rid types return + log.info(f"Proposing updated edge with node provider {available_rid_types}") + edge_profile.rid_types = available_rid_types edge_profile.status = EdgeStatus.PROPOSED - + edge_bundle = Bundle.generate(edge_rid, edge_profile.model_dump()) + + # no existing edge else: - source = kobj.rid - target = ctx.identity.rid - if ctx.identity.profile.node_type == NodeType.FULL: - edge_type = EdgeType.WEBHOOK - else: - edge_type = EdgeType.POLL - - edge_rid = KoiNetEdge(sha256_hash(str(source) + str(target))) - edge_profile = EdgeProfile( - source=source, - target=target, + log.info(f"Proposing new edge with node provider {available_rid_types}") + edge_bundle = generate_edge_bundle( + source=kobj.rid, + target=ctx.identity.rid, rid_types=available_rid_types, - edge_type=edge_type, - status=EdgeStatus.PROPOSED + edge_type=( + EdgeType.WEBHOOK + if ctx.identity.profile.node_type == NodeType.FULL + else EdgeType.POLL + ) ) # queued for processing - edge_bundle = Bundle.generate(edge_rid, edge_profile.model_dump()) ctx.kobj_queue.push(bundle=edge_bundle) log.info("Catching up on network state") - payload = ctx.request_handler.fetch_rids( node=kobj.rid, rid_types=available_rid_types @@ -236,42 +233,40 @@ def node_contact_handler(ctx: HandlerContext, kobj: KnowledgeObject): # will fetch remotely instead of checking local cache ctx.kobj_queue.push(rid=rid, source=kobj.rid) log.info("Done") - @KnowledgeHandler.create(HandlerType.Network) def 
basic_network_output_filter(ctx: HandlerContext, kobj: KnowledgeObject): - """Adds subscriber nodes to network targetes. + """Sets the network targets of the outgoing event for a knowledge object. Allows broadcasting of all RID types this node is an event provider for (set in node profile), and other nodes have subscribed to. All - nodes will also broadcast about their own (internally sourced) KOI - node, and KOI edges that they are part of, regardless of their node - profile configuration. Finally, nodes will also broadcast about - edges to the other node involved (regardless of if they are - subscribed). + nodes will also broadcast events about their own (internally sourced) + KOI node, and KOI edges that they are part of, regardless of their + node profile configuration. Finally, nodes will also broadcast about + edges to the other node involved (regardless of whether they are subscribed). """ - involves_me = False + involves_this_node = False + # internally sourced knowledge objects if kobj.source is None: - if (type(kobj.rid) == KoiNetNode): + if type(kobj.rid) is KoiNetNode: if (kobj.rid == ctx.identity.rid): - involves_me = True + involves_this_node = True - elif type(kobj.rid) == KoiNetEdge: + elif type(kobj.rid) is KoiNetEdge: edge_profile = kobj.bundle.validate_contents(EdgeProfile) if edge_profile.source == ctx.identity.rid: log.debug(f"Adding edge target '{edge_profile.target!r}' to network targets") - kobj.network_targets.update([edge_profile.target]) - involves_me = True + kobj.network_targets.add(edge_profile.target) + involves_this_node = True elif edge_profile.target == ctx.identity.rid: log.debug(f"Adding edge source '{edge_profile.source!r}' to network targets") - kobj.network_targets.update([edge_profile.source]) - involves_me = True + kobj.network_targets.add(edge_profile.source) + involves_this_node = True - if (type(kobj.rid) in ctx.identity.profile.provides.event or involves_me): - # broadcasts to subscribers if I'm an event provider of this RID type OR it involves me + if (type(kobj.rid) in ctx.identity.profile.provides.event or involves_this_node): subscribers = ctx.graph.get_neighbors( direction="out", allowed_type=type(kobj.rid) @@ -291,7 +286,8 @@ def forget_edge_on_node_deletion(ctx: HandlerContext, kobj: KnowledgeObject): for edge_rid in ctx.graph.get_edges(): edge_bundle = ctx.cache.read(edge_rid) - if not edge_bundle: continue + if not edge_bundle: + continue edge_profile = edge_bundle.validate_contents(EdgeProfile) if kobj.rid in (edge_profile.source, edge_profile.target): diff --git a/src/koi_net/processor/knowledge_object.py b/src/koi_net/processor/knowledge_object.py index 0c8c240..c6c17d2 100644 --- a/src/koi_net/processor/knowledge_object.py +++ b/src/koi_net/processor/knowledge_object.py @@ -10,20 +10,22 @@ class KnowledgeObject(BaseModel): Represents an RID, manifest, bundle, or event. Contains three fields (`normalized_event_type`, `source`, `network_targets`) used for - decision making in the knowledge processing pipeline. The source - indicates which node this object originated from, or `None` if it - was generated by this node. + decision making in the knowledge processing pipeline. + + The source indicates which node this object originated from, or + `None` if it was generated by this node. The normalized event type indicates how the knowledge object is viewed from the perspective of this node, and what cache actions will take place. (`NEW`, `UPDATE`) -> cache write, `FORGET` -> - cache delete, `None` -> no cache action.
+ cache delete, `None` -> no cache action, and end of pipeline. The network targets indicate other nodes in the network this knowledge object will be sent to. The event sent to them will be constructed from this knowledge object's RID, manifest, contents, and normalized event type. """ + rid: RID manifest: Manifest | None = None contents: dict | None = None @@ -43,6 +45,7 @@ def from_rid( source: KoiNetNode | None = None ) -> "KnowledgeObject": """Creates a `KnowledgeObject` from an `RID`.""" + return cls( rid=rid, event_type=event_type, @@ -57,6 +60,7 @@ def from_manifest( source: KoiNetNode | None = None ) -> "KnowledgeObject": """Creates a `KnowledgeObject` from a `Manifest`.""" + return cls( rid=manifest.rid, manifest=manifest, @@ -72,6 +76,7 @@ def from_bundle( source: KoiNetNode | None = None ) -> "KnowledgeObject": """Creates a `KnowledgeObject` from a `Bundle`.""" + return cls( rid=bundle.rid, manifest=bundle.manifest, @@ -87,6 +92,7 @@ def from_event( source: KoiNetNode | None = None ) -> "KnowledgeObject": """Creates a `KnowledgeObject` from an `Event`.""" + return cls( rid=event.rid, manifest=event.manifest, @@ -96,10 +102,11 @@ def from_event( ) @property - def bundle(self): + def bundle(self) -> Bundle: """Bundle representation of knowledge object.""" + if self.manifest is None or self.contents is None: - return + raise ValueError("Knowledge object missing manifest or contents, cannot convert to `Bundle`.") return Bundle( manifest=self.manifest, @@ -107,8 +114,9 @@ def bundle(self): ) @property - def normalized_event(self): + def normalized_event(self) -> Event: """Event representation of knowledge object.""" + if self.normalized_event_type is None: raise ValueError("Internal event's normalized event type is None, cannot convert to Event") @@ -118,7 +126,7 @@ def normalized_event(self): event_type=EventType.FORGET ) - else: + else: return Event( rid=self.rid, event_type=self.normalized_event_type, diff --git a/src/koi_net/processor/kobj_queue.py b/src/koi_net/processor/kobj_queue.py index e3a44ae..bfad24c 100644 --- a/src/koi_net/processor/kobj_queue.py +++ b/src/koi_net/processor/kobj_queue.py @@ -10,7 +10,7 @@ class KobjQueue: - """Provides access to this node's knowledge processing pipeline.""" + """Queue for knowledge objects entering the processing pipeline.""" q: Queue[KnowledgeObject] def __init__(self): @@ -26,14 +26,14 @@ def push( event_type: EventType | None = None, source: KoiNetNode | None = None ): - """Queues knowledge object to be handled by processing pipeline. + """Pushes knowledge object to queue. - Knowledge may take the form of an RID, manifest, bundle, event, + Input may take the form of an RID, manifest, bundle, event, or knowledge object (with an optional event type for RID, manifest, or bundle objects). All objects will be normalized - to knowledge objects and queued. If `flush` is `True`, the queue - will be flushed immediately after adding the new knowledge. + to knowledge objects and queued. 
""" + if rid: _kobj = KnowledgeObject.from_rid(rid, event_type, source) elif manifest: diff --git a/src/koi_net/processor/pipeline.py b/src/koi_net/processor/pipeline.py index 20e3a8f..e1e8339 100644 --- a/src/koi_net/processor/pipeline.py +++ b/src/koi_net/processor/pipeline.py @@ -34,7 +34,7 @@ def __init__( request_handler: RequestHandler, event_queue: EventQueue, graph: NetworkGraph, - knowledge_handlers: list[KnowledgeHandler] = [] + knowledge_handlers: list[KnowledgeHandler] ): self.handler_context = handler_context self.cache = cache @@ -79,29 +79,52 @@ def call_handler_chain( if resp is STOP_CHAIN: log.debug(f"Handler chain stopped by {handler.func.__name__}") return STOP_CHAIN + # kobj unmodified elif resp is None: continue + # kobj modified by handler elif isinstance(resp, KnowledgeObject): kobj = resp log.debug(f"Knowledge object modified by {handler.func.__name__}") + else: raise ValueError(f"Handler {handler.func.__name__} returned invalid response '{resp}'") - + return kobj def process(self, kobj: KnowledgeObject): - """Sends provided knowledge obejct through knowledge processing pipeline. + """Sends knowledge object through knowledge processing pipeline. - Handler chains are called in between major events in the pipeline, indicated by their handler type. Each handler type is guaranteed to have access to certain knowledge, and may affect a subsequent action in the pipeline. The five handler types are as follows: - - RID - provided RID; if event type is `FORGET`, this handler decides whether to delete the knowledge from the cache by setting the normalized event type to `FORGET`, otherwise this handler decides whether to validate the manifest (and fetch it if not provided). - - Manifest - provided RID, manifest; decides whether to validate the bundle (and fetch it if not provided). - - Bundle - provided RID, manifest, contents (bundle); decides whether to write knowledge to the cache by setting the normalized event type to `NEW` or `UPDATE`. - - Network - provided RID, manifest, contents (bundle); decides which nodes (if any) to broadcast an event about this knowledge to. (Note, if event type is `FORGET`, the manifest and contents will be retrieved from the local cache, and indicate the last state of the knowledge before it was deleted.) - - Final - provided RID, manifests, contents (bundle); final action taken after network broadcast. + Handler chains are called in between major events in the + pipeline, indicated by their handler type. Each handler type is + guaranteed to have access to certain knowledge, and may affect a + subsequent action in the pipeline. The five handler types are as + follows: + - RID - provided RID; if event type is `FORGET`, this handler + decides whether to delete the knowledge from the cache by + setting the normalized event type to `FORGET`, otherwise this + handler decides whether to validate the manifest (and fetch it + if not provided). After processing, if event type is `FORGET`, + the manifest and contents will be retrieved from the local cache, + and indicate the last state of the knowledge before it was + deleted. + - Manifest - provided RID, manifest; decides whether to validate + the bundle (and fetch it if not provided). + - Bundle - provided RID, manifest, contents (bundle); decides + whether to write knowledge to the cache by setting the + normalized event type to `NEW` or `UPDATE`. + - Network - provided RID, manifest, contents (bundle); decides + which nodes (if any) to broadcast an event about this knowledge + to. 
+ - Final - provided RID, manifests, contents (bundle); final + action taken after network broadcast. - The pipeline may be stopped by any point by a single handler returning the `STOP_CHAIN` sentinel. In that case, the process will exit immediately. Further handlers of that type and later handler chains will not be called. + The pipeline may be stopped by any point by a single handler + returning the `STOP_CHAIN` sentinel. In that case, the process + will exit immediately. Further handlers of that type and later + handler chains will not be called. """ log.debug(f"Handling {kobj!r}") @@ -110,7 +133,7 @@ def process(self, kobj: KnowledgeObject): if kobj.event_type == EventType.FORGET: bundle = self.cache.read(kobj.rid) - if not bundle: + if not bundle: log.debug("Local bundle not found") return @@ -131,7 +154,7 @@ def process(self, kobj: KnowledgeObject): node=kobj.source, rids=[kobj.rid] ) - + if not payload.manifests: log.debug("Failed to find manifest") return @@ -142,7 +165,7 @@ def process(self, kobj: KnowledgeObject): if kobj is STOP_CHAIN: return # attempt to retrieve bundle - if not kobj.bundle: + if not kobj.contents: log.debug("Bundle not found") if kobj.source is None: return @@ -157,13 +180,13 @@ def process(self, kobj: KnowledgeObject): log.debug("Failed to find bundle") return - bundle = payload.bundles[0] + bundle = payload.bundles[0] if kobj.manifest != bundle.manifest: log.warning("Retrieved bundle contains a different manifest") kobj.manifest = bundle.manifest - kobj.contents = bundle.contents + kobj.contents = bundle.contents kobj = self.call_handler_chain(HandlerType.Bundle, kobj) if kobj is STOP_CHAIN: return @@ -177,7 +200,7 @@ def process(self, kobj: KnowledgeObject): self.cache.delete(kobj.rid) else: - log.debug("Normalized event type was never set, no cache or network operations will occur") + log.debug("Normalized event type was not set, no cache or network operations will occur") return if type(kobj.rid) in (KoiNetNode, KoiNetEdge): From 87dbfa71d85eeda381e21e7ad175eade66644700 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Mon, 17 Nov 2025 12:42:29 -0500 Subject: [PATCH 31/53] updated documentation for protocol and worker modules --- src/koi_net/protocol/api_models.py | 1 + src/koi_net/protocol/edge.py | 5 +++++ src/koi_net/protocol/envelope.py | 20 ++++++++++++----- src/koi_net/protocol/errors.py | 2 ++ src/koi_net/protocol/model_map.py | 1 + src/koi_net/protocol/secure.py | 35 +++++++++++++++-------------- src/koi_net/workers/base.py | 7 ++++-- src/koi_net/workers/event_worker.py | 16 +++++++++---- src/koi_net/workers/kobj_worker.py | 4 +++- 9 files changed, 61 insertions(+), 30 deletions(-) diff --git a/src/koi_net/protocol/api_models.py b/src/koi_net/protocol/api_models.py index 2189f48..c6d1152 100644 --- a/src/koi_net/protocol/api_models.py +++ b/src/koi_net/protocol/api_models.py @@ -60,6 +60,7 @@ class ErrorResponse(BaseModel): type RequestModels = EventsPayload | PollEvents | FetchRids | FetchManifests | FetchBundles type ResponseModels = RidsPayload | ManifestsPayload | BundlesPayload | EventsPayload | ErrorResponse + type ApiModels = Annotated[ RequestModels | ResponseModels, Field(discriminator="type") diff --git a/src/koi_net/protocol/edge.py b/src/koi_net/protocol/edge.py index daeecf0..aded064 100644 --- a/src/koi_net/protocol/edge.py +++ b/src/koi_net/protocol/edge.py @@ -28,9 +28,12 @@ def generate_edge_bundle( rid_types: list[RIDType], edge_type: EdgeType ) -> Bundle: + """Returns edge bundle.""" + edge_rid = KoiNetEdge(sha256_hash( str(source) + 
str(target) )) + edge_profile = EdgeProfile( source=source, target=target, @@ -38,8 +41,10 @@ def generate_edge_bundle( edge_type=edge_type, status=EdgeStatus.PROPOSED ) + edge_bundle = Bundle.generate( edge_rid, edge_profile.model_dump() ) + return edge_bundle \ No newline at end of file diff --git a/src/koi_net/protocol/envelope.py b/src/koi_net/protocol/envelope.py index cfe8141..3cce4fe 100644 --- a/src/koi_net/protocol/envelope.py +++ b/src/koi_net/protocol/envelope.py @@ -12,15 +12,21 @@ T = TypeVar("T", bound=RequestModels | ResponseModels) class SignedEnvelope(BaseModel, Generic[T]): - model_config = ConfigDict(exclude_none=True) - payload: T source_node: KoiNetNode target_node: KoiNetNode signature: str - def verify_with(self, pub_key: PublicKey): + model_config = ConfigDict(exclude_none=True) + + def verify_with(self, pub_key: PublicKey): + """Verifies signed envelope with public key. + + Raises `cryptography.exceptions.InvalidSignature` on failure. + """ + # IMPORTANT: calling `model_dump()` loses all typing! when converting between SignedEnvelope and UnsignedEnvelope, use the Pydantic classes, not the dictionary form + unsigned_envelope = UnsignedEnvelope[T]( payload=self.payload, source_node=self.source_node, @@ -28,20 +34,22 @@ def verify_with(self, pub_key: PublicKey): ) log.debug(f"Verifying envelope: {unsigned_envelope.model_dump_json(exclude_none=True)}") - + pub_key.verify( self.signature, unsigned_envelope.model_dump_json(exclude_none=True).encode() ) class UnsignedEnvelope(BaseModel, Generic[T]): - model_config = ConfigDict(exclude_none=True) - payload: T source_node: KoiNetNode target_node: KoiNetNode + model_config = ConfigDict(exclude_none=True) + def sign_with(self, priv_key: PrivateKey) -> SignedEnvelope[T]: + """Signs with private key and returns `SignedEnvelope`.""" + log.debug(f"Signing envelope: {self.model_dump_json(exclude_none=True)}") log.debug(f"Type: [{type(self.payload)}]") diff --git a/src/koi_net/protocol/errors.py b/src/koi_net/protocol/errors.py index 2d1ebe7..1784097 100644 --- a/src/koi_net/protocol/errors.py +++ b/src/koi_net/protocol/errors.py @@ -1,3 +1,5 @@ +"""Defines KOI-net protocol errors.""" + from enum import StrEnum diff --git a/src/koi_net/protocol/model_map.py b/src/koi_net/protocol/model_map.py index 79ed6c2..192cb38 100644 --- a/src/koi_net/protocol/model_map.py +++ b/src/koi_net/protocol/model_map.py @@ -27,6 +27,7 @@ class Models(NamedTuple): response_envelope: type[SignedEnvelope] | None +# maps API paths to request and response models API_MODEL_MAP: dict[str, Models] = { BROADCAST_EVENTS_PATH: Models( request=EventsPayload, diff --git a/src/koi_net/protocol/secure.py b/src/koi_net/protocol/secure.py index c8387f4..8aa9d36 100644 --- a/src/koi_net/protocol/secure.py +++ b/src/koi_net/protocol/secure.py @@ -14,7 +14,7 @@ def der_to_raw_signature(der_signature: bytes, curve=ec.SECP256R1()) -> bytes: - """Convert a DER-encoded signature to raw r||s format.""" + """Converts a DER-encoded signature to raw r||s format.""" # Decode the DER signature to get r and s r, s = decode_dss_signature(der_signature) @@ -31,7 +31,7 @@ def der_to_raw_signature(der_signature: bytes, curve=ec.SECP256R1()) -> bytes: def raw_to_der_signature(raw_signature: bytes, curve=ec.SECP256R1()) -> bytes: - """Convert a raw r||s signature to DER format.""" + """Converts a raw r||s signature to DER format.""" # Determine byte length based on curve bit size byte_length = (curve.key_size + 7) // 8 @@ -59,13 +59,16 @@ def __init__(self, priv_key): @classmethod def 
generate(cls): + """Generates a new `PrivateKey`.""" return cls(priv_key=ec.generate_private_key(ec.SECP256R1())) def public_key(self) -> "PublicKey": + """Returns an instance of `PublicKey` derived from this private key.""" return PublicKey(self.priv_key.public_key()) @classmethod def from_pem(cls, priv_key_pem: str, password: str): + """Loads `PrivateKey` from encrypted PEM string.""" return cls( priv_key=serialization.load_pem_private_key( data=priv_key_pem.encode(), @@ -74,6 +77,7 @@ def from_pem(cls, priv_key_pem: str, password: str): ) def to_pem(self, password: str) -> str: + """Saves `PrivateKey` to encrypted PEM string.""" return self.priv_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, @@ -81,6 +85,7 @@ def to_pem(self, password: str) -> str: ).decode() def sign(self, message: bytes) -> str: + """Returns base64 encoded raw signature bytes of the form r||s.""" hashed_message = sha256_hash(message.decode()) der_signature_bytes = self.priv_key.sign( @@ -97,7 +102,7 @@ def sign(self, message: bytes) -> str: log.debug(f"signature: {signature}") return signature - + class PublicKey: pub_key: ec.EllipticCurvePublicKey @@ -107,6 +112,7 @@ def __init__(self, pub_key): @classmethod def from_pem(cls, pub_key_pem: str): + """Loads `PublicKey` from PEM string.""" return cls( pub_key=serialization.load_pem_public_key( data=pub_key_pem.encode() @@ -114,13 +120,15 @@ def from_pem(cls, pub_key_pem: str): ) def to_pem(self) -> str: + """Saves `PublicKey` to PEM string.""" return self.pub_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo ).decode() @classmethod - def from_der(cls, pub_key_der: str): + def from_der(cls, pub_key_der: str): + """Loads `PublicKey` from base64 encoded DER string.""" return cls( pub_key=serialization.load_der_public_key( data=b64decode(pub_key_der) @@ -128,6 +136,7 @@ def from_der(cls, pub_key_der: str): ) def to_der(self) -> str: + """Saves `PublicKey` to base64 encoded DER string.""" return b64encode( self.pub_key.public_bytes( encoding=serialization.Encoding.DER, @@ -136,29 +145,21 @@ def to_der(self) -> str: ).decode() def to_node_rid(self, name) -> KoiNetNode: + """Returns an orn:koi-net.node RID from hashed DER string.""" return KoiNetNode( name=name, hash=sha256_hash(self.to_der()) ) - def verify(self, signature: str, message: bytes) -> bool: - # hashed_message = sha256_hash(message.decode()) + def verify(self, signature: str, message: bytes): + """Verifies a signature for a message. - # print(message.hex()) - # print() - # print(hashed_message) - # print() - # print(message.decode()) - - # log.debug(f"Verifying message with [{self.to_der()}]") - # log.debug(f"hash: {hashed_message}") - # log.debug(f"signature: {signature}") + Raises `cryptography.exceptions.InvalidSignature` on failure.
+ """ raw_signature_bytes = b64decode(signature) der_signature_bytes = raw_to_der_signature(raw_signature_bytes) - # NOTE: throws cryptography.exceptions.InvalidSignature on failure - self.pub_key.verify( signature=der_signature_bytes, data=message, diff --git a/src/koi_net/workers/base.py b/src/koi_net/workers/base.py index fa3b7b0..fa8d227 100644 --- a/src/koi_net/workers/base.py +++ b/src/koi_net/workers/base.py @@ -2,16 +2,19 @@ class End: - """Class for a sentinel value by knowledge handlers.""" + """Class for STOP_WORKER sentinel pushed to worker queues.""" pass STOP_WORKER = End() class ThreadWorker: + """Base class for thread workers.""" + thread: threading.Thread def __init__(self): self.thread = threading.Thread(target=self.run) def run(self): - ... \ No newline at end of file + """Processing loop for thread.""" + pass \ No newline at end of file diff --git a/src/koi_net/workers/event_worker.py b/src/koi_net/workers/event_worker.py index 50f49e2..2e6af74 100644 --- a/src/koi_net/workers/event_worker.py +++ b/src/koi_net/workers/event_worker.py @@ -18,15 +18,17 @@ class EventProcessingWorker(ThreadWorker): + """Thread worker that processes the `event_queue`.""" + event_buffer: dict[KoiNetNode, list[Event]] buffer_times: dict[KoiNetNode, float] def __init__( self, - event_queue: EventQueue, - request_handler: RequestHandler, config: NodeConfig, cache: Cache, + event_queue: EventQueue, + request_handler: RequestHandler, poll_event_buf: PollEventBuffer ): self.event_queue = event_queue @@ -55,6 +57,12 @@ def flush_buffer(self, target: KoiNetNode, buffer: list[Event]): self.buffer_times[target] = None def decide_event(self, item: QueuedEvent) -> bool: + """Decides how to handle queued event. + + Returns `True` when event should be broadcasted, and `False` + otherwise. 
If the target is a partial node, the event is pushed + to the event polling buffer.""" + node_bundle = self.cache.read(item.target) if node_bundle: node_profile = node_bundle.validate_contents(NodeProfile) @@ -63,6 +71,7 @@ def decide_event(self, item: QueuedEvent) -> bool: return True elif node_profile.node_type == NodeType.PARTIAL: + # to be handled by poll event buffer self.poll_event_buf.push(item.target, item.event) return False @@ -72,7 +81,6 @@ def decide_event(self, item: QueuedEvent) -> bool: else: log.warning(f"Couldn't handle event {item.event!r} in queue, node {item.target!r} unknown to me") return False - def run(self): log.info("Started event worker") @@ -98,7 +106,7 @@ def run(self): self.buffer_times[item.target] = now event_buf.append(item.event) - + # When new events are dequeued, check buffer for max length if len(event_buf) >= self.max_buf_len: self.flush_buffer(item.target, event_buf) diff --git a/src/koi_net/workers/kobj_worker.py b/src/koi_net/workers/kobj_worker.py index 524e7cc..0123fd3 100644 --- a/src/koi_net/workers/kobj_worker.py +++ b/src/koi_net/workers/kobj_worker.py @@ -10,6 +10,8 @@ class KnowledgeProcessingWorker(ThreadWorker): + """Thread worker that processes the `kobj_queue`.""" + def __init__( self, kobj_queue: KobjQueue, @@ -41,4 +43,4 @@ def run(self): pass except Exception as e: - traceback.print_exc() \ No newline at end of file + traceback.print_exc() From ac4fb83c82f34850b7402c5fd92ef3cf3cfbf21f Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 18 Nov 2025 15:19:42 -0500 Subject: [PATCH 32/53] generalized poll event buffer, to event buffer including timing features for broadcast batching, added broadcast event buffer as an independent component, moved internal config for workers into config class, --- src/koi_net/config/core.py | 16 +++- src/koi_net/core.py | 5 +- src/koi_net/network/event_buffer.py | 53 +++++++++++ src/koi_net/network/poll_event_buffer.py | 29 ------ src/koi_net/network/response_handler.py | 6 +- src/koi_net/workers/event_worker.py | 108 +++++++++-------------- 6 files changed, 115 insertions(+), 102 deletions(-) create mode 100644 src/koi_net/network/event_buffer.py delete mode 100644 src/koi_net/network/poll_event_buffer.py diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index eaf0fa9..19eea94 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -11,6 +11,14 @@ log = structlog.stdlib.get_logger() +class EventWorkerConfig(BaseModel): + queue_timeout: float = 0.1 + max_buf_len: int = 5 + max_wait_time: float = 1.0 + +class KobjWorkerConfig(BaseModel): + queue_timeout: float = 0.1 + class NodeContact(BaseModel): rid: KoiNetNode | None = None url: str | None = None @@ -25,9 +33,11 @@ class KoiNetConfig(BaseModel): rid_types_of_interest: list[RIDType] = [KoiNetNode] cache_directory_path: str = ".rid_cache" - event_queues_path: str = "event_queues.json" private_key_pem_path: str = "priv_key.pem" + event_worker: EventWorkerConfig = EventWorkerConfig() + kobj_worker: KobjWorkerConfig = KobjWorkerConfig() + first_contact: NodeContact = NodeContact() class EnvConfig(BaseModel): @@ -64,10 +74,10 @@ class NodeConfig(BaseModel): @model_validator(mode="after") def generate_rid_cascade(self): """Generates node RID if missing.""" - if self.koi_net.node_rid: + if self.koi_net.node_rid and self.koi_net.node_profile.public_key: return self - log.debug("Node RID not found in config, attempting to generate") + log.debug("Node RID or public key not found in config, attempting to generate") try: # 
attempts to read existing private key PEM file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index ef535c3..5b0d3d1 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -17,7 +17,7 @@ from .network.request_handler import RequestHandler from .network.resolver import NetworkResolver from .network.response_handler import ResponseHandler -from .network.poll_event_buffer import PollEventBuffer +from .network.event_buffer import EventBuffer from .processor.pipeline import KnowledgePipeline from .processor.kobj_queue import KobjQueue from .secure import Secure @@ -38,7 +38,8 @@ class BaseNode(NodeAssembler): config_cls = NodeConfig kobj_queue = KobjQueue event_queue = EventQueue - poll_event_buf = PollEventBuffer + poll_event_buf = EventBuffer + broadcast_event_buf = EventBuffer config = ConfigLoader knowledge_handlers = [ basic_rid_handler, diff --git a/src/koi_net/network/event_buffer.py b/src/koi_net/network/event_buffer.py new file mode 100644 index 0000000..ebd021d --- /dev/null +++ b/src/koi_net/network/event_buffer.py @@ -0,0 +1,53 @@ +import time +from rid_lib.types import KoiNetNode + +from ..protocol.event import Event + + +class EventBuffer: + """Buffers outgoing events per target node.""" + buffers: dict[KoiNetNode, list[Event]] + start_time: dict[KoiNetNode, float] + + def __init__(self): + self.buffers = {} + self.start_time = {} + + def push(self, node: KoiNetNode, event: Event): + """Pushes event to the specified node's buffer. + + Sets start time to now if unset. + """ + + if node not in self.buffers: + self.start_time[node] = time.time() + + event_buf = self.buffers.setdefault(node, []) + event_buf.append(event) + + def buf_len(self, node: KoiNetNode): + """Returns the length of a node's event buffer.""" + return len(self.buffers.get(node, [])) + + def flush(self, node: KoiNetNode, limit: int = 0) -> list[Event]: + """Flushes all (or up to `limit`) events for a node. + + Resets start time.
+ """ + + if node in self.start_time: + del self.start_time[node] + + if node not in self.buffers: + return [] + + event_buf = self.buffers[node] + + if limit and len(event_buf) > limit: + flushed_events = event_buf[:limit] + self.buffers[node] = event_buf[limit:] + else: + flushed_events = event_buf.copy() + del self.buffers[node] + + return flushed_events \ No newline at end of file diff --git a/src/koi_net/network/poll_event_buffer.py b/src/koi_net/network/poll_event_buffer.py deleted file mode 100644 index 6150c37..0000000 --- a/src/koi_net/network/poll_event_buffer.py +++ /dev/null @@ -1,29 +0,0 @@ -from rid_lib.types import KoiNetNode - -from ..protocol.event import Event - - -class PollEventBuffer: - """Stores outgoing events intended for polling nodes.""" - buffers: dict[KoiNetNode, list[Event]] - - def __init__(self): - self.buffers = dict() - - def push(self, node: KoiNetNode, event: Event): - """Pushes event to specified node.""" - event_buf = self.buffers.setdefault(node, []) - event_buf.append(event) - - def flush(self, node: KoiNetNode, limit: int = 0) -> list[Event]: - """Flushes all events for a given node, with an optional limit.""" - event_buf = self.buffers.get(node, []) - - if limit and len(event_buf) > limit: - flushed_events = event_buf[:limit] - self.buffers[node] = event_buf[limit:] - else: - flushed_events = event_buf.copy() - self.buffers[node] = [] - - return flushed_events \ No newline at end of file diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index 1abbb12..a2cf3f4 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -4,7 +4,7 @@ from rid_lib.ext import Manifest, Cache from rid_lib.ext.bundle import Bundle -from koi_net.network.poll_event_buffer import PollEventBuffer +from koi_net.network.event_buffer import EventBuffer from koi_net.processor.kobj_queue import KobjQueue from koi_net.protocol.consts import BROADCAST_EVENTS_PATH, FETCH_BUNDLES_PATH, FETCH_MANIFESTS_PATH, FETCH_RIDS_PATH, POLL_EVENTS_PATH from koi_net.protocol.envelope import SignedEnvelope @@ -29,13 +29,13 @@ class ResponseHandler: cache: Cache kobj_queue: KobjQueue - poll_event_buf: PollEventBuffer + poll_event_buf: EventBuffer def __init__( self, cache: Cache, kobj_queue: KobjQueue, - poll_event_buf: PollEventBuffer, + poll_event_buf: EventBuffer, secure: Secure ): self.cache = cache diff --git a/src/koi_net/workers/event_worker.py b/src/koi_net/workers/event_worker.py index 2e6af74..17ec4db 100644 --- a/src/koi_net/workers/event_worker.py +++ b/src/koi_net/workers/event_worker.py @@ -7,10 +7,9 @@ from rid_lib.types import KoiNetNode from ..config.core import NodeConfig -from ..network.event_queue import EventQueue, QueuedEvent +from ..network.event_queue import EventQueue from ..network.request_handler import RequestHandler -from ..network.poll_event_buffer import PollEventBuffer -from ..protocol.event import Event +from ..network.event_buffer import EventBuffer from ..protocol.node import NodeProfile, NodeType from .base import ThreadWorker, STOP_WORKER @@ -20,16 +19,14 @@ class EventProcessingWorker(ThreadWorker): """Thread worker that processes the `event_queue`.""" - event_buffer: dict[KoiNetNode, list[Event]] - buffer_times: dict[KoiNetNode, float] - def __init__( self, config: NodeConfig, cache: Cache, event_queue: EventQueue, request_handler: RequestHandler, - poll_event_buf: PollEventBuffer + poll_event_buf: EventBuffer, + broadcast_event_buf: EventBuffer ): self.event_queue = 
event_queue self.request_handler = request_handler @@ -37,89 +34,70 @@ def __init__( self.config = config self.cache = cache self.poll_event_buf = poll_event_buf - - self.timeout: float = 0.1 - self.max_buf_len: int = 5 - self.max_wait_time: float = 1.0 - - self.event_buffer = dict() - self.buffer_times = dict() + self.broadcast_event_buf = broadcast_event_buf super().__init__() - def flush_buffer(self, target: KoiNetNode, buffer: list[Event]): + def flush_and_broadcast(self, target: KoiNetNode): + """Broadcasts all events in the target's buffer to that node.""" + events = self.broadcast_event_buf.flush(target) + try: - self.request_handler.broadcast_events(target, events=buffer) + self.request_handler.broadcast_events(target, events=events) except Exception as e: traceback.print_exc() - self.event_buffer[target] = [] - self.buffer_times[target] = None - - def decide_event(self, item: QueuedEvent) -> bool: - """Decides how to handle queued event. - - Returns `True` when event should be broadcasted, and `False` - otherwise. If the target is a partial node, the event is pushed - to the event polling buffer.""" - - node_bundle = self.cache.read(item.target) - if node_bundle: - node_profile = node_bundle.validate_contents(NodeProfile) - - if node_profile.node_type == NodeType.FULL: - return True - - elif node_profile.node_type == NodeType.PARTIAL: - # to be handled by poll event buffer - self.poll_event_buf.push(item.target, item.event) - return False - - elif item.target == self.config.koi_net.first_contact.rid: - return True - - else: - log.warning(f"Couldn't handle event {item.event!r} in queue, node {item.target!r} unknown to me") - return False - def run(self): log.info("Started event worker") while True: - now = time.time() try: - item = self.event_queue.q.get(timeout=self.timeout) + item = self.event_queue.q.get(timeout=self.config.koi_net.event_worker.queue_timeout) try: if item is STOP_WORKER: - log.info(f"Received 'STOP_WORKER' signal, flushing buffer...") - for target in self.event_buffer.keys(): - self.flush_buffer(target, self.event_buffer[target]) + log.info(f"Received 'STOP_WORKER' signal, flushing all buffers...") + for target in list(self.broadcast_event_buf.buffers.keys()): + self.flush_and_broadcast(target) return log.info(f"Dequeued {item.event!r} -> {item.target!r}") - if not self.decide_event(item): + # determines which buffer to push event to based on target node type + node_bundle = self.cache.read(item.target) + if node_bundle: + node_profile = node_bundle.validate_contents(NodeProfile) + + if node_profile.node_type == NodeType.FULL: + self.broadcast_event_buf.push(item.target, item.event) + + elif node_profile.node_type == NodeType.PARTIAL: + self.poll_event_buf.push(item.target, item.event) + continue + + elif item.target == self.config.koi_net.first_contact.rid: + self.broadcast_event_buf.push(item.target, item.event) + + else: + log.warning(f"Couldn't handle event {item.event!r} in queue, node {item.target!r} unknown to me") + continue - - event_buf = self.event_buffer.setdefault(item.target, []) - if not event_buf: - self.buffer_times[item.target] = now - - event_buf.append(item.event) - - # When new events are dequeued, check buffer for max length - if len(event_buf) >= self.max_buf_len: - self.flush_buffer(item.target, event_buf) + + if self.broadcast_event_buf.buf_len(item.target) > self.config.koi_net.event_worker.max_buf_len: + self.flush_and_broadcast(item.target) + finally: self.event_queue.q.task_done() except queue.Empty: # On timeout, check all buffers for max wait time
- for target, event_buf in self.event_buffer.items(): - if (len(event_buf) == 0) or (self.buffer_times.get(target) is None): + for target in list(self.broadcast_event_buf.buffers.keys()): + start_time = self.broadcast_event_buf.start_time.get(target) + now = time.time() + + if (start_time is None) or (self.broadcast_event_buf.buf_len(target) == 0): continue - if (now - self.buffer_times[target]) >= self.max_wait_time: - self.flush_buffer(target, event_buf) + if (now - start_time) >= self.config.koi_net.event_worker.max_wait_time: + self.flush_and_broadcast(target) + except Exception as e: traceback.print_exc() \ No newline at end of file From 56fe7e316a5fbde59127675a0dee9f6911b35f20 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 18 Nov 2025 16:22:09 -0500 Subject: [PATCH 33/53] removed unused default effector actions, renamed secure to secure manager, restructured sync manager and catch up call in lifecycle, added more docstrings --- src/koi_net/assembler.py | 7 ++--- src/koi_net/core.py | 4 +-- src/koi_net/default_actions.py | 23 --------------- src/koi_net/effector.py | 4 +-- src/koi_net/handshaker.py | 3 ++ src/koi_net/identity.py | 4 +-- src/koi_net/lifecycle.py | 27 ++++++++--------- src/koi_net/log_system.py | 2 ++ src/koi_net/network/request_handler.py | 12 ++++---- src/koi_net/network/response_handler.py | 10 +++---- src/koi_net/{secure.py => secure_manager.py} | 26 +--------------- src/koi_net/sync_manager.py | 31 ++++++-------------- 12 files changed, 48 insertions(+), 105 deletions(-) delete mode 100644 src/koi_net/default_actions.py rename src/koi_net/{secure.py => secure_manager.py} (81%) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 1a54f80..8c51cca 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,10 +1,10 @@ -from enum import StrEnum import inspect +from enum import StrEnum from typing import Any, Protocol from dataclasses import make_dataclass -from pydantic import BaseModel import structlog +from pydantic import BaseModel from .entrypoints.base import EntryPoint @@ -38,6 +38,7 @@ def __new__(cls, name: str, bases: tuple, dct: dict[str]): return cls class NodeContainer(Protocol): + """Dummy 'shape' for node containers built by assembler.""" entrypoint = EntryPoint class NodeAssembler(metaclass=BuildOrderer): @@ -79,8 +80,6 @@ def _build_deps( dep_graph[comp_name] = (comp_type, dep_names) - # print(f"{comp_name} ({comp_type}) -> {dep_names}") - return dep_graph @classmethod diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 5b0d3d1..bdcbebb 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -20,7 +20,7 @@ from .network.event_buffer import EventBuffer from .processor.pipeline import KnowledgePipeline from .processor.kobj_queue import KobjQueue -from .secure import Secure +from .secure_manager import SecureManager from .entrypoints import NodeServer, NodePoller from .processor.knowledge_handlers import ( basic_manifest_handler, @@ -54,7 +54,7 @@ class BaseNode(NodeAssembler): directory_path=config.koi_net.cache_directory_path) identity = NodeIdentity graph = NetworkGraph - secure = Secure + secure_manager = SecureManager handshaker = Handshaker error_handler = ErrorHandler request_handler = RequestHandler diff --git a/src/koi_net/default_actions.py b/src/koi_net/default_actions.py deleted file mode 100644 index 52d1a56..0000000 --- a/src/koi_net/default_actions.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Implementations of default dereference actions.""" - -from rid_lib.types import KoiNetNode -from 
rid_lib.ext import Bundle -from .effector import Effector, ActionContext - - -@Effector.register_default_action(KoiNetNode) -def dereference_koi_node( - ctx: ActionContext, rid: KoiNetNode -) -> Bundle | None: - """Dereference function for this KOI node. - - Generates a bundle from this node's profile data in the config. - """ - - if rid != ctx.identity.rid: - return - - return Bundle.generate( - rid=ctx.identity.rid, - contents=ctx.identity.profile.model_dump() - ) \ No newline at end of file diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index 16b1449..acd0a30 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -32,8 +32,8 @@ class Effector: cache: Cache resolver: NetworkResolver - kobj_queue: KobjQueue | None - action_context: ActionContext | None + kobj_queue: KobjQueue + action_context: ActionContext _action_table: dict[ type[RID], Callable[ diff --git a/src/koi_net/handshaker.py b/src/koi_net/handshaker.py index f738446..bea8511 100644 --- a/src/koi_net/handshaker.py +++ b/src/koi_net/handshaker.py @@ -9,6 +9,7 @@ class Handshaker: + """Handles handshaking with other nodes.""" def __init__( self, cache: Cache, @@ -21,9 +22,11 @@ def __init__( def handshake_with(self, target: KoiNetNode): """Initiates a handshake with target node. + Pushes successive `FORGET` and `NEW` events to target node to reset the target's cache in case it already knew this node. """ + log.debug(f"Initiating handshake with {target}") self.event_queue.push( Event.from_rid( diff --git a/src/koi_net/identity.py b/src/koi_net/identity.py index 38e1a8e..e4ca08b 100644 --- a/src/koi_net/identity.py +++ b/src/koi_net/identity.py @@ -9,7 +9,7 @@ class NodeIdentity: """Represents a node's identity (RID, profile).""" - config: NodeConfig + config: NodeConfig def __init__(self, config: NodeConfig): self.config = config @@ -20,4 +20,4 @@ def rid(self) -> KoiNetNode: @property def profile(self) -> NodeProfile: - return self.config.koi_net.node_profile \ No newline at end of file + return self.config.koi_net.node_profile diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 7d1df8a..77aabbe 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -2,6 +2,7 @@ from contextlib import contextmanager, asynccontextmanager from rid_lib.ext import Bundle +from rid_lib.types import KoiNetNode from .sync_manager import SyncManager from .handshaker import Handshaker @@ -90,31 +91,29 @@ def start(self): self.event_worker.thread.start() self.graph.generate() - # refresh to reflect changes (if any) in config.yaml - + # refresh to reflect changes (if any) in config.yaml node profile self.kobj_queue.push(bundle=Bundle.generate( rid=self.identity.rid, contents=self.identity.profile.model_dump() )) - log.debug("Waiting for kobj queue to empty") self.kobj_queue.q.join() - if self.sync_manager.catch_up_with_coordinators(): - pass + node_providers = self.graph.get_neighbors( + direction="in", + allowed_type=[KoiNetNode] + ) + + if node_providers: + log.debug(f"Catching up with `orn:koi-net.node` providers: {node_providers}") + self.sync_manager.catch_up_with(node_providers, [KoiNetNode]) elif self.config.koi_net.first_contact.rid: - log.debug(f"I don't have any edges with coordinators, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") - + log.debug(f"No edges with `orn:koi-net.node` providers, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) - - + def stop(self): 
- """Stops a node. - - Finishes processing knowledge object queue. - """ - log.info(f"Waiting for kobj queue to empty ({self.kobj_queue.q.unfinished_tasks} tasks remaining)") + """Stops a node, send stop signals to workers.""" self.kobj_queue.q.put(STOP_WORKER) self.event_queue.q.put(STOP_WORKER) \ No newline at end of file diff --git a/src/koi_net/log_system.py b/src/koi_net/log_system.py index 44e680d..3c4cad2 100644 --- a/src/koi_net/log_system.py +++ b/src/koi_net/log_system.py @@ -87,6 +87,8 @@ ) class LogSystem: + """Handles initializing the logging system.""" + def __init__(self): file_handler = RotatingFileHandler( filename="log.ndjson", diff --git a/src/koi_net/network/request_handler.py b/src/koi_net/network/request_handler.py index a999cde..0f58658 100644 --- a/src/koi_net/network/request_handler.py +++ b/src/koi_net/network/request_handler.py @@ -27,7 +27,7 @@ ) from ..protocol.node import NodeProfile, NodeType from ..protocol.model_map import API_MODEL_MAP -from ..secure import Secure +from ..secure_manager import SecureManager from .error_handler import ErrorHandler log = structlog.stdlib.get_logger() @@ -58,19 +58,19 @@ class RequestHandler: cache: Cache identity: NodeIdentity - secure: Secure + secure_manager: SecureManager error_handler: ErrorHandler def __init__( self, cache: Cache, identity: NodeIdentity, - secure: Secure, + secure_manager: SecureManager, error_handler: ErrorHandler ): self.cache = cache self.identity = identity - self.secure = secure + self.secure_manager = secure_manager self.error_handler = error_handler def get_base_url(self, node_rid: KoiNetNode) -> str: @@ -105,7 +105,7 @@ def make_request( url = self.get_base_url(node) + path log.info(f"Making request to {url}") - signed_envelope = self.secure.create_envelope( + signed_envelope = self.secure_manager.create_envelope( payload=request, target=node ) @@ -131,7 +131,7 @@ def make_request( return resp_envelope = resp_env_model.model_validate_json(result.text) - self.secure.validate_envelope(resp_envelope) + self.secure_manager.validate_envelope(resp_envelope) return resp_envelope.payload diff --git a/src/koi_net/network/response_handler.py b/src/koi_net/network/response_handler.py index a2cf3f4..2e3144b 100644 --- a/src/koi_net/network/response_handler.py +++ b/src/koi_net/network/response_handler.py @@ -8,7 +8,7 @@ from koi_net.processor.kobj_queue import KobjQueue from koi_net.protocol.consts import BROADCAST_EVENTS_PATH, FETCH_BUNDLES_PATH, FETCH_MANIFESTS_PATH, FETCH_RIDS_PATH, POLL_EVENTS_PATH from koi_net.protocol.envelope import SignedEnvelope -from koi_net.secure import Secure +from koi_net.secure_manager import SecureManager from ..protocol.api_models import ( EventsPayload, @@ -36,15 +36,15 @@ def __init__( cache: Cache, kobj_queue: KobjQueue, poll_event_buf: EventBuffer, - secure: Secure + secure_manager: SecureManager ): self.cache = cache self.kobj_queue = kobj_queue self.poll_event_buf = poll_event_buf - self.secure = secure + self.secure_manager = secure_manager def handle_response(self, path: str, req: SignedEnvelope): - self.secure.validate_envelope(req) + self.secure_manager.validate_envelope(req) response_map = { BROADCAST_EVENTS_PATH: self.broadcast_events_handler, @@ -59,7 +59,7 @@ def handle_response(self, path: str, req: SignedEnvelope): if response is None: return - return self.secure.create_envelope( + return self.secure_manager.create_envelope( payload=response, target=req.source_node ) diff --git a/src/koi_net/secure.py b/src/koi_net/secure_manager.py similarity index 81% 
rename from src/koi_net/secure.py rename to src/koi_net/secure_manager.py index 8c73198..c2bc90b 100644 --- a/src/koi_net/secure.py +++ b/src/koi_net/secure_manager.py @@ -1,5 +1,4 @@ import structlog -from functools import wraps import cryptography.exceptions from rid_lib.ext import Bundle, Cache from rid_lib.ext.utils import sha256_hash @@ -22,7 +21,7 @@ log = structlog.stdlib.get_logger() -class Secure: +class SecureManager: """Subsystem handling secure protocol logic.""" identity: NodeIdentity cache: Cache @@ -112,27 +111,4 @@ def validate_envelope(self, envelope: SignedEnvelope): # check that this node is the target of the envelope if envelope.target_node != self.identity.rid: raise InvalidTargetError(f"Envelope target {envelope.target_node!r} is not me") - - def envelope_handler(self, func): - """Wrapper function validates envelopes for server endpoints. - - Validates incoming envelope and passes payload to endpoint - handler. Resulting payload is returned as a signed envelope. - """ - @wraps(func) - async def wrapper(req: SignedEnvelope, *args, **kwargs) -> SignedEnvelope | None: - log.info("Validating envelope") - - self.validate_envelope(req) - log.info("Calling endpoint handler") - - result = await func(req, *args, **kwargs) - - if result is not None: - log.info("Creating response envelope") - return self.create_envelope( - payload=result, - target=req.source_node - ) - return wrapper diff --git a/src/koi_net/sync_manager.py b/src/koi_net/sync_manager.py index 74b6f62..c53c398 100644 --- a/src/koi_net/sync_manager.py +++ b/src/koi_net/sync_manager.py @@ -1,14 +1,14 @@ from rid_lib.ext import Cache -from rid_lib.types import KoiNetNode -from koi_net.network.graph import NetworkGraph -from koi_net.network.request_handler import RequestHandler -from koi_net.processor.kobj_queue import KobjQueue +from .network.graph import NetworkGraph +from .network.request_handler import RequestHandler +from .processor.kobj_queue import KobjQueue from .protocol.api_models import ErrorResponse from .protocol.node import NodeProfile, NodeType class SyncManager: + """Handles state synchronization actions with other nodes.""" graph: NetworkGraph cache: Cache request_handler: RequestHandler @@ -25,26 +25,15 @@ def __init__( self.cache = cache self.request_handler = request_handler self.kobj_queue = kobj_queue - - def catch_up_with_coordinators(self) -> bool: - return self.catch_up_with( - nodes=self.graph.get_neighbors( - direction="in", - allowed_type=KoiNetNode - ), - rid_types=[KoiNetNode] - ) - def catch_up_with(self, nodes, rid_types) -> bool: - # get all of the nodes such that, (node) -[orn:koi-net.node]-> (me) - # node providers that I am subscribed to - if not nodes: - return False - + def catch_up_with(self, nodes, rid_types): + """Catches up with the state of RID types within other nodes.""" + for node in nodes: node_bundle = self.cache.read(node) node_profile = node_bundle.validate_contents(NodeProfile) + # can't catch up with partial nodes if node_profile.node_type != NodeType.FULL: continue @@ -58,6 +47,4 @@ def catch_up_with(self, nodes, rid_types) -> bool: self.kobj_queue.push( manifest=manifest, source=node - ) - - return True \ No newline at end of file + ) \ No newline at end of file From 3857f10cdd70e5de6288759c3deefa7875cfdda9 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 18 Nov 2025 17:25:13 -0500 Subject: [PATCH 34/53] refactored effector to work similar to the processing pipeline, accepts a list of deref handlers, made public key overwrite warning more specific, gave effector 
deref handlers access to handler context, readded post init injection to effector --- src/koi_net/assembler.py | 2 +- src/koi_net/config/core.py | 4 +- src/koi_net/core.py | 3 +- src/koi_net/effector.py | 99 +++++++++++++------------------- src/koi_net/processor/context.py | 15 +++-- 5 files changed, 57 insertions(+), 66 deletions(-) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 8c51cca..40c159e 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -41,7 +41,7 @@ class NodeContainer(Protocol): """Dummy 'shape' for node containers built by assembler.""" entrypoint = EntryPoint -class NodeAssembler(metaclass=BuildOrderer): +class NodeAssembler(metaclass=BuildOrderer): def __new__(self) -> NodeContainer: """Returns assembled node container.""" return self._build() diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index 19eea94..a1ccd89 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -101,7 +101,9 @@ def generate_rid_cascade(self): log.debug(f"Node RID set to {self.koi_net.node_rid}") if self.koi_net.node_profile.public_key != pub_key.to_der(): + if self.koi_net.node_profile.public_key: + log.warning("New private key overwriting old public key!") + self.koi_net.node_profile.public_key = pub_key.to_der() - log.warning("New private key overwrote old public key!") return self \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index bdcbebb..11bbae6 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -50,6 +50,7 @@ class BaseNode(NodeAssembler): basic_network_output_filter, forget_edge_on_node_deletion ] + deref_handlers = [] cache = lambda config: Cache( directory_path=config.koi_net.cache_directory_path) identity = NodeIdentity @@ -61,8 +62,8 @@ class BaseNode(NodeAssembler): sync_manager = SyncManager response_handler = ResponseHandler resolver = NetworkResolver - effector = Effector handler_context = HandlerContext + effector = Effector pipeline = KnowledgePipeline kobj_worker = KnowledgeProcessingWorker event_worker = EventProcessingWorker diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index acd0a30..3b31dd9 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -1,27 +1,33 @@ -import structlog +from dataclasses import dataclass from typing import Callable from enum import StrEnum + +import structlog from rid_lib.ext import Cache, Bundle from rid_lib.core import RID, RIDType from rid_lib.types import KoiNetNode + +from .processor.context import HandlerContext from .network.resolver import NetworkResolver from .processor.kobj_queue import KobjQueue -from .identity import NodeIdentity log = structlog.stdlib.get_logger() - -class ActionContext: - """Provides action handlers access to other subsystems.""" +@dataclass +class DerefHandler: + func: Callable[[HandlerContext, RID], Bundle | None] + rid_types: tuple[RIDType] - identity: NodeIdentity - - def __init__( - self, - identity: NodeIdentity, - ): - self.identity = identity + def __call__(self, ctx: HandlerContext, rid: RID) -> Bundle | None: + return self.func(ctx, rid) + @classmethod + def create(cls, rid_types: tuple[RIDType]): + def decorator(func: Callable) -> DerefHandler: + handler = cls(func, rid_types) + return handler + return decorator + class BundleSource(StrEnum): CACHE = "CACHE" @@ -33,50 +39,23 @@ class Effector: cache: Cache resolver: NetworkResolver kobj_queue: KobjQueue - action_context: ActionContext - _action_table: dict[ - type[RID], - Callable[ - [ActionContext, 
RID], - Bundle | None - ] - ] = dict() + handler_context: HandlerContext def __init__( self, cache: Cache, resolver: NetworkResolver, kobj_queue: KobjQueue, - identity: NodeIdentity + handler_context: HandlerContext, + deref_handlers: list[DerefHandler] ): self.cache = cache self.resolver = resolver self.kobj_queue = kobj_queue - self.action_context = ActionContext(identity) - self._action_table = self.__class__._action_table.copy() - - @classmethod - def register_default_action(cls, rid_type: RIDType): - def decorator(func: Callable) -> Callable: - cls._action_table[rid_type] = func - return func - return decorator - - def register_action(self, rid_type: RIDType): - """Registers a new dereference action for an RID type. + self.handler_context = handler_context + self.deref_handlers = deref_handlers - Example: - This function should be used as a decorator on an action function:: - - @node.register_action(KoiNetNode) - def deref_koi_net_node(ctx: ActionContext, rid: KoiNetNode): - # return a Bundle or None - return - """ - def decorator(func: Callable) -> Callable: - self._action_table[rid_type] = func - return func - return decorator + self.handler_context.set_effector(self) def _try_cache(self, rid: RID) -> tuple[Bundle, BundleSource] | None: bundle = self.cache.read(rid) @@ -87,18 +66,20 @@ def _try_cache(self, rid: RID) -> tuple[Bundle, BundleSource] | None: else: log.debug("Cache miss") return None - + def _try_action(self, rid: RID) -> tuple[Bundle, BundleSource] | None: - if type(rid) not in self._action_table: - log.debug("No action available") + action = None + for handler in self.deref_handlers: + if type(rid) not in handler.rid_types: + continue + action = handler + break + + if not action: + log.debug("No action found") return None - log.debug("Action available") - func = self._action_table[type(rid)] - bundle = func( - ctx=self.action_context, - rid=rid - ) + bundle = action(ctx=self.handler_context, rid=rid) if bundle: log.debug("Action hit") @@ -106,7 +87,6 @@ def _try_action(self, rid: RID) -> tuple[Bundle, BundleSource] | None: else: log.debug("Action miss") return None - def _try_network(self, rid: RID) -> tuple[Bundle, KoiNetNode] | None: bundle, source = self.resolver.fetch_remote_bundle(rid) @@ -118,13 +98,13 @@ def _try_network(self, rid: RID) -> tuple[Bundle, KoiNetNode] | None: log.debug("Network miss") return None - def deref( self, rid: RID, refresh_cache: bool = False, use_network: bool = False, - handle_result: bool = True + handle_result: bool = True, + write_through: bool = False ) -> Bundle | None: """Dereferences an RID. 
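A minimal usage sketch of the refactored `deref` call, assuming an assembled node container named `node` and some RID value `rid` (both placeholders, not taken from this diff):

    # resolve an RID via the cache, deref handlers, or peer nodes, then wait for
    # the resulting knowledge object to clear the kobj queue before continuing
    bundle = node.effector.deref(
        rid,
        use_network=True,    # fall back to fetching the bundle from peer nodes
        handle_result=True,  # push the resulting bundle onto the kobj queue
        write_through=True,  # block until the kobj queue drains
    )
    if bundle is not None:
        print(bundle.contents)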
@@ -136,7 +116,7 @@ def deref( rid: RID to dereference refresh_cache: skips cache read when `True` use_network: enables fetching from other nodes when `True` - handle_result: handles resulting bundle with knowledge pipeline when `True` + handle_result: sends resulting bundle to kobj queue when `True` """ log.debug(f"Dereferencing {rid!r}") @@ -159,6 +139,9 @@ def deref( bundle=bundle, source=source if type(source) is KoiNetNode else None ) + + if write_through: + self.kobj_queue.q.join() # TODO: refactor for general solution, param to write through to cache before continuing # like `self.processor.kobj_queue.join()`` diff --git a/src/koi_net/processor/context.py b/src/koi_net/processor/context.py index cdd3870..5de2c08 100644 --- a/src/koi_net/processor/context.py +++ b/src/koi_net/processor/context.py @@ -1,7 +1,7 @@ +from typing import TYPE_CHECKING from rid_lib.ext import Cache -from koi_net.effector import Effector -from koi_net.network.resolver import NetworkResolver +from ..network.resolver import NetworkResolver from ..config.core import NodeConfig from ..network.graph import NetworkGraph from ..network.event_queue import EventQueue @@ -9,6 +9,9 @@ from ..identity import NodeIdentity from .kobj_queue import KobjQueue +if TYPE_CHECKING: + from ..effector import Effector + class HandlerContext: """Context object provides knowledge handlers access to other components.""" @@ -21,7 +24,7 @@ class HandlerContext: graph: NetworkGraph request_handler: RequestHandler resolver: NetworkResolver - effector: Effector + effector: "Effector" def __init__( self, @@ -32,8 +35,7 @@ def __init__( kobj_queue: KobjQueue, graph: NetworkGraph, request_handler: RequestHandler, - resolver: NetworkResolver, - effector: Effector + resolver: NetworkResolver ): self.identity = identity self.config = config @@ -43,4 +45,7 @@ def __init__( self.graph = graph self.request_handler = request_handler self.resolver = resolver + + def set_effector(self, effector: "Effector"): + """Post initialization injection of effector component.""" self.effector = effector \ No newline at end of file From aae753f3fec448f4b6040b506443d73fb130e419 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 18 Nov 2025 17:51:00 -0500 Subject: [PATCH 35/53] hacked type annotation return from node assembler to give a better dynamic representation of the node container to the type checker, added write through option on deref function --- src/koi_net/assembler.py | 4 +-- src/koi_net/core.py | 57 ++++++++++++++++---------------- src/koi_net/effector.py | 6 ++-- src/koi_net/processor/handler.py | 4 +-- 4 files changed, 35 insertions(+), 36 deletions(-) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 40c159e..46237d6 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,6 +1,6 @@ import inspect from enum import StrEnum -from typing import Any, Protocol +from typing import Any, Protocol, Self from dataclasses import make_dataclass import structlog @@ -42,7 +42,7 @@ class NodeContainer(Protocol): entrypoint = EntryPoint class NodeAssembler(metaclass=BuildOrderer): - def __new__(self) -> NodeContainer: + def __new__(self) -> Self: """Returns assembled node container.""" return self._build() diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 11bbae6..ecb033a 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -5,7 +5,7 @@ from .assembler import NodeAssembler from .config.core import NodeConfig from .processor.context import HandlerContext -from .effector import Effector +from .effector import 
DerefHandler, Effector from .handshaker import Handshaker from .sync_manager import SyncManager from .identity import NodeIdentity @@ -20,6 +20,7 @@ from .network.event_buffer import EventBuffer from .processor.pipeline import KnowledgePipeline from .processor.kobj_queue import KobjQueue +from .processor.handler import KnowledgeHandler from .secure_manager import SecureManager from .entrypoints import NodeServer, NodePoller from .processor.knowledge_handlers import ( @@ -34,14 +35,14 @@ class BaseNode(NodeAssembler): - log_system = LogSystem - config_cls = NodeConfig - kobj_queue = KobjQueue - event_queue = EventQueue - poll_event_buf = EventBuffer - broadcast_event_buf = EventBuffer - config = ConfigLoader - knowledge_handlers = [ + log_system: LogSystem = LogSystem + config_cls: NodeConfig = NodeConfig + kobj_queue: KobjQueue = KobjQueue + event_queue: EventQueue = EventQueue + poll_event_buf: EventBuffer = EventBuffer + broadcast_event_buf: EventBuffer = EventBuffer + config: ConfigLoader = ConfigLoader + knowledge_handlers: list[KnowledgeHandler] = [ basic_rid_handler, basic_manifest_handler, secure_profile_handler, @@ -50,27 +51,27 @@ class BaseNode(NodeAssembler): basic_network_output_filter, forget_edge_on_node_deletion ] - deref_handlers = [] - cache = lambda config: Cache( + deref_handlers: list[DerefHandler] = [] + cache: Cache = lambda config: Cache( directory_path=config.koi_net.cache_directory_path) - identity = NodeIdentity - graph = NetworkGraph - secure_manager = SecureManager - handshaker = Handshaker - error_handler = ErrorHandler - request_handler = RequestHandler - sync_manager = SyncManager - response_handler = ResponseHandler - resolver = NetworkResolver - handler_context = HandlerContext - effector = Effector - pipeline = KnowledgePipeline - kobj_worker = KnowledgeProcessingWorker - event_worker = EventProcessingWorker - lifecycle = NodeLifecycle + identity: NodeIdentity = NodeIdentity + graph: NetworkGraph = NetworkGraph + secure_manager: SecureManager = SecureManager + handshaker: Handshaker = Handshaker + error_handler: ErrorHandler = ErrorHandler + request_handler: RequestHandler = RequestHandler + sync_manager: SyncManager = SyncManager + response_handler: ResponseHandler = ResponseHandler + resolver: NetworkResolver = NetworkResolver + handler_context: HandlerContext = HandlerContext + effector: Effector = Effector + pipeline: KnowledgePipeline = KnowledgePipeline + kobj_worker: KnowledgeProcessingWorker = KnowledgeProcessingWorker + event_worker: EventProcessingWorker = EventProcessingWorker + lifecycle: NodeLifecycle = NodeLifecycle class FullNode(BaseNode): - entrypoint = NodeServer + entrypoint: NodeServer = NodeServer class PartialNode(BaseNode): - entrypoint = NodePoller \ No newline at end of file + entrypoint: NodePoller = NodePoller \ No newline at end of file diff --git a/src/koi_net/effector.py b/src/koi_net/effector.py index 3b31dd9..e589cd4 100644 --- a/src/koi_net/effector.py +++ b/src/koi_net/effector.py @@ -117,6 +117,7 @@ def deref( refresh_cache: skips cache read when `True` use_network: enables fetching from other nodes when `True` handle_result: sends resulting bundle to kobj queue when `True` + write_through: waits for kobj queue to empty when `True` """ log.debug(f"Dereferencing {rid!r}") @@ -142,8 +143,5 @@ def deref( if write_through: self.kobj_queue.q.join() - - # TODO: refactor for general solution, param to write through to cache before continuing - # like `self.processor.kobj_queue.join()`` - + return bundle \ No newline at end of 
file diff --git a/src/koi_net/processor/handler.py b/src/koi_net/processor/handler.py index b7704a5..528d4cd 100644 --- a/src/koi_net/processor/handler.py +++ b/src/koi_net/processor/handler.py @@ -37,8 +37,8 @@ class KnowledgeHandler: func: Callable[[HandlerContext, KnowledgeObject], None | KnowledgeObject | StopChain] handler_type: HandlerType - rid_types: tuple[RIDType] - event_types: tuple[EventType | None] + rid_types: tuple[RIDType] = () + event_types: tuple[EventType | None] = () def __call__( self, From ac3e70f0725f3d8e2cf6d9162232902c0e98da09 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 19 Nov 2025 10:21:57 -0500 Subject: [PATCH 36/53] version bump, tentative release -> beta.4 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9495bbc..239977f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" -version = "1.2.0b3" +version = "1.2.0b4" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} From 65cd2169282d1870c880672857f2c236966b4e13 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 19 Nov 2025 14:23:35 -0500 Subject: [PATCH 37/53] spun out config proxy class from config loader, renamed config cls to config schema, better separation of concerns for config system --- src/koi_net/assembler.py | 1 + src/koi_net/config/core.py | 4 +-- src/koi_net/config/loader.py | 46 ++++++++++++++++++-------------- src/koi_net/config/proxy.py | 20 ++++++++++++++ src/koi_net/core.py | 8 +++--- src/koi_net/processor/context.py | 4 +++ 6 files changed, 58 insertions(+), 25 deletions(-) create mode 100644 src/koi_net/config/proxy.py diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 46237d6..9cc7234 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -42,6 +42,7 @@ class NodeContainer(Protocol): entrypoint = EntryPoint class NodeAssembler(metaclass=BuildOrderer): + # Self annotation lying to type checker to reflect typing set in node blueprints def __new__(self) -> Self: """Returns assembled node container.""" return self._build() diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index a1ccd89..d95747b 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -45,9 +45,9 @@ class EnvConfig(BaseModel): Values set in the config are the variables names, and are loaded from the environment at runtime. For example, if the config YAML - sets `priv_key_password: PRIV_KEY_PASSWORD` accessing + sets `priv_key_password: "PRIV_KEY_PASSWORD"` accessing `priv_key_password` would retrieve the value of `PRIV_KEY_PASSWORD` - from the environment. + from the environment variables. 
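The environment indirection described in this docstring can be made concrete with a short, self-contained sketch. It is not koi-net code: the EnvConfigSketch model and resolve_env helper below are invented for illustration, and only show the general pattern of storing variable names in the config and reading their values from the process environment at runtime.

    import os
    from pydantic import BaseModel

    class EnvConfigSketch(BaseModel):
        # each field holds the NAME of an environment variable, not its value
        priv_key_password: str = "PRIV_KEY_PASSWORD"

    def resolve_env(config: EnvConfigSketch) -> dict[str, str | None]:
        # look up the referenced variables in the process environment at runtime
        return {field: os.getenv(var) for field, var in config.model_dump().items()}

    os.environ["PRIV_KEY_PASSWORD"] = "example-password"   # stand-in value for the demo
    print(resolve_env(EnvConfigSketch()))                   # {'priv_key_password': 'example-password'}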
""" priv_key_password: str = "PRIV_KEY_PASSWORD" diff --git a/src/koi_net/config/loader.py b/src/koi_net/config/loader.py index d653541..08ee9d2 100644 --- a/src/koi_net/config/loader.py +++ b/src/koi_net/config/loader.py @@ -1,48 +1,54 @@ from ruamel.yaml import YAML + +from koi_net.config.proxy import ConfigProxy from .core import NodeConfig class ConfigLoader: """Loads node config from a YAML file, and proxies access to it.""" - _config: NodeConfig - _file_path: str = "config.yaml" - _file_content: str + file_path: str = "config.yaml" + file_content: str - def __init__(self, config_cls: type[NodeConfig]): - self.load_from_yaml(config_cls) + config_schema: type[NodeConfig] + proxy: ConfigProxy - def __getattr__(self, name): - """Proxies attribute lookups to internal config object.""" - return getattr(self._config, name) + def __init__( + self, + config_schema: type[NodeConfig], + config: ConfigProxy + ): + self.config_schema = config_schema + self.proxy = config + self.load_from_yaml() + self.save_to_yaml() - def load_from_yaml(self, config_cls: type[NodeConfig]): + def load_from_yaml(self): """Loads config from YAML file, or generates it if missing.""" yaml = YAML() try: - with open(self._file_path, "r") as f: - self._file_content = f.read() - config_data = yaml.load(self._file_content) - self._config = config_cls.model_validate(config_data) + with open(self.file_path, "r") as f: + self.file_content = f.read() + config_data = yaml.load(self.file_content) + self.proxy._config = self.config_schema.model_validate(config_data) except FileNotFoundError: - self._config = config_cls() - - self.save_to_yaml() + self.proxy._config = self.config_schema() def save_to_yaml(self): """Saves config to YAML file.""" yaml = YAML() - with open(self._file_path, "w") as f: + with open(self.file_path, "w") as f: try: - config_data = self._config.model_dump(mode="json") + config_data = self.proxy._config.model_dump(mode="json") yaml.dump(config_data, f) + except Exception as e: # rewrites original content if YAML dump fails - if self._file_content: + if self.file_content: f.seek(0) f.truncate() - f.write(self._file_content) + f.write(self.file_content) raise e \ No newline at end of file diff --git a/src/koi_net/config/proxy.py b/src/koi_net/config/proxy.py new file mode 100644 index 0000000..ad0d94b --- /dev/null +++ b/src/koi_net/config/proxy.py @@ -0,0 +1,20 @@ +from koi_net.config.core import NodeConfig + + +class ConfigProxy: + """Proxy for config access. + + Allows initialization of this component, and updating state without + destroying the original reference. Handled as if it were a config + model by other classes, loaded and saved by the `ConfigLoader`. 
+ """ + _config: NodeConfig + + def __init__(self): + self._config = None + + def __getattr__(self, name): + if not self._config: + raise Exception("Proxy called before config loaded") + + return getattr(self._config, name) \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index ecb033a..3faaf17 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,9 +1,10 @@ from rid_lib.ext import Cache from .log_system import LogSystem -from .config.loader import ConfigLoader from .assembler import NodeAssembler from .config.core import NodeConfig +from .config.proxy import ConfigProxy +from .config.loader import ConfigLoader from .processor.context import HandlerContext from .effector import DerefHandler, Effector from .handshaker import Handshaker @@ -36,12 +37,13 @@ class BaseNode(NodeAssembler): log_system: LogSystem = LogSystem - config_cls: NodeConfig = NodeConfig kobj_queue: KobjQueue = KobjQueue event_queue: EventQueue = EventQueue poll_event_buf: EventBuffer = EventBuffer broadcast_event_buf: EventBuffer = EventBuffer - config: ConfigLoader = ConfigLoader + config_schema = NodeConfig + config: NodeConfig = ConfigProxy + config_loader: ConfigLoader = ConfigLoader knowledge_handlers: list[KnowledgeHandler] = [ basic_rid_handler, basic_manifest_handler, diff --git a/src/koi_net/processor/context.py b/src/koi_net/processor/context.py index 5de2c08..f7cac21 100644 --- a/src/koi_net/processor/context.py +++ b/src/koi_net/processor/context.py @@ -3,6 +3,7 @@ from ..network.resolver import NetworkResolver from ..config.core import NodeConfig +from ..config.loader import ConfigLoader from ..network.graph import NetworkGraph from ..network.event_queue import EventQueue from ..network.request_handler import RequestHandler @@ -18,6 +19,7 @@ class HandlerContext: identity: NodeIdentity config: NodeConfig + config_loader: ConfigLoader cache: Cache event_queue: EventQueue kobj_queue: KobjQueue @@ -30,6 +32,7 @@ def __init__( self, identity: NodeIdentity, config: NodeConfig, + config_loader: ConfigLoader, cache: Cache, event_queue: EventQueue, kobj_queue: KobjQueue, @@ -39,6 +42,7 @@ def __init__( ): self.identity = identity self.config = config + self.config_loader = config_loader self.cache = cache self.event_queue = event_queue self.kobj_queue = kobj_queue From c454822252d44fb5f0cb2b2134b2bce329761d0a Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 19 Nov 2025 19:04:47 -0500 Subject: [PATCH 38/53] config_cls -> config_schema, switched assembler functions back to chain calls, implemented cache directly with config dependency (necessary for new config system), moved all code execution out of init functions, lifecycle start now calls config loader, and loads priv key to secure manager, removed lifespan from fastapi app, now done using lifecycle context manager in server class to allow for config loading before uvicorn starts, added kobj worker config --- examples/coordinator.py | 2 +- examples/partial.py | 2 +- src/koi_net/assembler.py | 34 +++++-------- src/koi_net/cache.py | 81 ++++++++++++++++++++++++++++++ src/koi_net/config/loader.py | 2 - src/koi_net/core.py | 7 +-- src/koi_net/entrypoints/server.py | 33 ++++++------ src/koi_net/lifecycle.py | 20 +++++++- src/koi_net/secure_manager.py | 10 ++-- src/koi_net/workers/kobj_worker.py | 7 ++- 10 files changed, 139 insertions(+), 59 deletions(-) create mode 100644 src/koi_net/cache.py diff --git a/examples/coordinator.py b/examples/coordinator.py index 4ded18c..7e1cab9 100644 --- a/examples/coordinator.py +++ 
b/examples/coordinator.py @@ -61,7 +61,7 @@ def handshake_handler(ctx: HandlerContext, kobj: KnowledgeObject): ctx.kobj_queue.push(bundle=edge_bundle) class CoordinatorNode(FullNode): - config_cls = CoordinatorConfig + config_schema = CoordinatorConfig knowledge_handlers = FullNode.knowledge_handlers + [handshake_handler] if __name__ == "__main__": diff --git a/examples/partial.py b/examples/partial.py index 5c634ab..3cfe42d 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -9,7 +9,7 @@ class MyPartialNodeConfig(PartialNodeConfig): ) class MyPartialNode(PartialNode): - config_cls = MyPartialNodeConfig + config_schema = MyPartialNodeConfig if __name__ == "__main__": node = MyPartialNode() diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 9cc7234..3a792a8 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -45,13 +45,10 @@ class NodeAssembler(metaclass=BuildOrderer): # Self annotation lying to type checker to reflect typing set in node blueprints def __new__(self) -> Self: """Returns assembled node container.""" - return self._build() + return self._build_node() @classmethod - def _build_deps( - cls, - build_order: list[str] - ) -> dict[str, tuple[CompType, list[str]]]: + def _build_deps(cls) -> dict[str, tuple[CompType, list[str]]]: """Returns dependency graph for components defined in `cls_build_order`. Graph representation is a dict where each key is a component name, @@ -60,7 +57,7 @@ def _build_deps( """ dep_graph = {} - for comp_name in build_order: + for comp_name in cls._build_order: try: comp = getattr(cls, comp_name) except AttributeError: @@ -84,9 +81,9 @@ def _build_deps( return dep_graph @classmethod - def _visualize(cls, dep_graph) -> str: + def _visualize(cls) -> str: """Returns representation of dependency graph in Graphviz DOT language.""" - dep_graph = cls._build_deps(cls._build_order) + dep_graph = cls._build_deps() s = "digraph G {\n" for node, (_, neighbors) in dep_graph.items(): @@ -99,11 +96,10 @@ def _visualize(cls, dep_graph) -> str: return s @classmethod - def _build_comps( - cls, - dep_graph: dict[str, tuple[CompType, list[str]]] - ) -> dict[str, Any]: + def _build_comps(cls) -> dict[str, Any]: """Returns assembled components from dependency graph.""" + dep_graph = cls._build_deps() + components: dict[str, Any] = {} for comp_name, (comp_type, dep_names) in dep_graph.items(): comp = getattr(cls, comp_name, None) @@ -119,12 +115,14 @@ def _build_comps( raise Exception(f"Couldn't find required component '{dep_name}'") dependencies[dep_name] = components[dep_name] components[comp_name] = comp(**dependencies) - + return components @classmethod - def _build_node(cls, components: dict[str, Any]) -> NodeContainer: + def _build_node(cls) -> NodeContainer: """Returns node container from components.""" + components = cls._build_comps() + NodeContainer = make_dataclass( cls_name="NodeContainer", fields=[ @@ -136,11 +134,3 @@ def _build_node(cls, components: dict[str, Any]) -> NodeContainer: ) return NodeContainer(**components) - - @classmethod - def _build(cls) -> NodeContainer: - """Returns node container after calling full build process.""" - dep_graph = cls._build_deps(cls._build_order) - comps = cls._build_comps(dep_graph) - node = cls._build_node(comps) - return node \ No newline at end of file diff --git a/src/koi_net/cache.py b/src/koi_net/cache.py new file mode 100644 index 0000000..dacb3c7 --- /dev/null +++ b/src/koi_net/cache.py @@ -0,0 +1,81 @@ +import os +import shutil +from rid_lib.core import RID, RIDType +from 
rid_lib.ext import Bundle +from rid_lib.ext.utils import b64_encode, b64_decode + +from .config.core import NodeConfig + + +class Cache: + def __init__(self, config: NodeConfig): + self.config = config + + @property + def directory_path(self): + return self.config.koi_net.cache_directory_path + + def file_path_to(self, rid: RID) -> str: + encoded_rid_str = b64_encode(str(rid)) + return f"{self.directory_path}/{encoded_rid_str}.json" + + def write(self, bundle: Bundle) -> Bundle: + """Writes bundle to cache, returns a Bundle.""" + if not os.path.exists(self.directory_path): + os.makedirs(self.directory_path) + + with open( + file=self.file_path_to(bundle.manifest.rid), + mode="w", + encoding="utf-8" + ) as f: + f.write(bundle.model_dump_json(indent=2)) + + return bundle + + def exists(self, rid: RID) -> bool: + return os.path.exists( + self.file_path_to(rid) + ) + + def read(self, rid: RID) -> Bundle | None: + """Reads and returns CacheEntry from RID cache.""" + try: + with open( + file=self.file_path_to(rid), + mode="r", + encoding="utf-8" + ) as f: + return Bundle.model_validate_json(f.read()) + except FileNotFoundError: + return None + + def list_rids(self, rid_types: list[RIDType] | None = None) -> list[RID]: + if not os.path.exists(self.directory_path): + return [] + + rids = [] + for filename in os.listdir(self.directory_path): + encoded_rid_str = filename.split(".")[0] + rid_str = b64_decode(encoded_rid_str) + rid = RID.from_string(rid_str) + + if not rid_types or type(rid) in rid_types: + rids.append(rid) + + return rids + + def delete(self, rid: RID) -> None: + """Deletes cache bundle.""" + try: + os.remove(self.file_path_to(rid)) + except FileNotFoundError: + return + + def drop(self) -> None: + """Deletes all cache bundles.""" + try: + shutil.rmtree(self.directory_path) + except FileNotFoundError: + return + diff --git a/src/koi_net/config/loader.py b/src/koi_net/config/loader.py index 08ee9d2..b8d3d18 100644 --- a/src/koi_net/config/loader.py +++ b/src/koi_net/config/loader.py @@ -20,8 +20,6 @@ def __init__( ): self.config_schema = config_schema self.proxy = config - self.load_from_yaml() - self.save_to_yaml() def load_from_yaml(self): """Loads config from YAML file, or generates it if missing.""" diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 3faaf17..419a786 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,5 +1,4 @@ -from rid_lib.ext import Cache - +from .cache import Cache from .log_system import LogSystem from .assembler import NodeAssembler from .config.core import NodeConfig @@ -34,7 +33,6 @@ secure_profile_handler ) - class BaseNode(NodeAssembler): log_system: LogSystem = LogSystem kobj_queue: KobjQueue = KobjQueue @@ -54,8 +52,7 @@ class BaseNode(NodeAssembler): forget_edge_on_node_deletion ] deref_handlers: list[DerefHandler] = [] - cache: Cache = lambda config: Cache( - directory_path=config.koi_net.cache_directory_path) + cache: Cache = Cache identity: NodeIdentity = NodeIdentity graph: NetworkGraph = NetworkGraph secure_manager: SecureManager = SecureManager diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index d57264c..9d50fc4 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -32,9 +32,8 @@ def __init__( self.config = config self.lifecycle = lifecycle self.response_handler = response_handler - self._build_app() - - def _build_endpoints(self, router: APIRouter): + + def build_endpoints(self, router: APIRouter): """Builds endpoints for API router.""" for path, 
models in API_MODEL_MAP.items(): def create_endpoint(path: str): @@ -56,22 +55,16 @@ async def endpoint(req): response_model_exclude_none=True ) - def _build_app(self): + def build_app(self): """Builds FastAPI app.""" - @asynccontextmanager - async def lifespan(*args, **kwargs): - async with self.lifecycle.async_run(): - yield - self.app = FastAPI( - lifespan=lifespan, title="KOI-net Protocol API", version="1.1.0" ) self.app.add_exception_handler(ProtocolError, self.protocol_error_handler) self.router = APIRouter(prefix="/koi-net") - self._build_endpoints(self.router) + self.build_endpoints(self.router) self.app.include_router(self.router) def protocol_error_handler(self, request, exc: ProtocolError): @@ -86,10 +79,14 @@ def protocol_error_handler(self, request, exc: ProtocolError): def run(self): """Starts FastAPI server and event handler.""" - uvicorn.run( - app=self.app, - host=self.config.server.host, - port=self.config.server.port, - log_config=None, - access_log=False - ) \ No newline at end of file + + with self.lifecycle.run(): + self.build_app() + + uvicorn.run( + app=self.app, + host=self.config.server.host, + port=self.config.server.port, + log_config=None, + access_log=False + ) \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 77aabbe..e700eed 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -4,6 +4,9 @@ from rid_lib.ext import Bundle from rid_lib.types import KoiNetNode +from koi_net.config.loader import ConfigLoader +from koi_net.secure_manager import SecureManager + from .sync_manager import SyncManager from .handshaker import Handshaker from .workers.kobj_worker import KnowledgeProcessingWorker @@ -22,6 +25,7 @@ class NodeLifecycle: """Manages node startup and shutdown processes.""" config: NodeConfig + config_loader: ConfigLoader identity: NodeIdentity graph: NetworkGraph kobj_queue: KobjQueue @@ -30,10 +34,12 @@ class NodeLifecycle: event_worker: EventProcessingWorker handshaker: Handshaker sync_manager: SyncManager + secure_manager: SecureManager def __init__( self, config: NodeConfig, + config_loader: ConfigLoader, identity: NodeIdentity, graph: NetworkGraph, kobj_queue: KobjQueue, @@ -41,9 +47,11 @@ def __init__( event_queue: EventQueue, event_worker: EventProcessingWorker, handshaker: Handshaker, - sync_manager: SyncManager + sync_manager: SyncManager, + secure_manager: SecureManager ): self.config = config + self.config_loader = config_loader self.identity = identity self.graph = graph self.kobj_queue = kobj_queue @@ -52,6 +60,7 @@ def __init__( self.event_worker = event_worker self.handshaker = handshaker self.sync_manager = sync_manager + self.secure_manager = secure_manager @contextmanager def run(self): @@ -86,7 +95,14 @@ def start(self): graph from nodes and edges in cache. Processes any state changes of node bundle. Initiates handshake with first contact if node doesn't have any neighbors. Catches up with coordinator state. 
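The `with self.lifecycle.run():` wrapper used by the server here reduces to a standard contextmanager shape: do startup work on enter, then always run shutdown in a finally block once the blocking call returns or raises. A self-contained sketch of that shape, where the print statements and serve_forever merely stand in for worker startup and the uvicorn.run call:

    from contextlib import contextmanager

    class LifecycleSketch:
        @contextmanager
        def run(self):
            print("startup: load config, start workers")   # runs before the server exists
            try:
                yield
            finally:
                print("shutdown: stop workers")             # runs even if serving raises

    def serve_forever():
        print("serving (blocking call, e.g. uvicorn.run)")

    with LifecycleSketch().run():
        serve_forever()

Because startup happens before the server object is even built, config loading no longer has to live inside a FastAPI lifespan hook, which is the motivation given in this patch's commit message.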
- """ + """ + + # attempt to load config from yaml, and write back changes (if any) + self.config_loader.load_from_yaml() + self.config_loader.save_to_yaml() + + self.secure_manager.load_priv_key() + self.kobj_worker.thread.start() self.event_worker.thread.start() self.graph.generate() diff --git a/src/koi_net/secure_manager.py b/src/koi_net/secure_manager.py index c2bc90b..46b8e83 100644 --- a/src/koi_net/secure_manager.py +++ b/src/koi_net/secure_manager.py @@ -37,22 +37,20 @@ def __init__( self.identity = identity self.cache = cache self.config = config - - self.priv_key = self._load_priv_key() - def _load_priv_key(self) -> PrivateKey: + def load_priv_key(self) -> PrivateKey: """Loads private key from PEM file path in config.""" # TODO: handle missing private key with open(self.config.koi_net.private_key_pem_path, "r") as f: priv_key_pem = f.read() - return PrivateKey.from_pem( + self.priv_key = PrivateKey.from_pem( priv_key_pem=priv_key_pem, password=self.config.env.priv_key_password ) - def _handle_unknown_node(self, envelope: SignedEnvelope) -> Bundle | None: + def handle_unknown_node(self, envelope: SignedEnvelope) -> Bundle | None: """Attempts to find node profile in proided envelope. If an unknown node sends an envelope, it may still be able to be @@ -89,7 +87,7 @@ def validate_envelope(self, envelope: SignedEnvelope): node_bundle = ( self.cache.read(envelope.source_node) or - self._handle_unknown_node(envelope) + self.handle_unknown_node(envelope) ) if not node_bundle: diff --git a/src/koi_net/workers/kobj_worker.py b/src/koi_net/workers/kobj_worker.py index 0123fd3..cd7277a 100644 --- a/src/koi_net/workers/kobj_worker.py +++ b/src/koi_net/workers/kobj_worker.py @@ -2,6 +2,8 @@ import traceback import structlog +from koi_net.config.core import NodeConfig + from ..processor.pipeline import KnowledgePipeline from ..processor.kobj_queue import KobjQueue from .base import ThreadWorker, STOP_WORKER @@ -14,12 +16,13 @@ class KnowledgeProcessingWorker(ThreadWorker): def __init__( self, + config: NodeConfig, kobj_queue: KobjQueue, pipeline: KnowledgePipeline ): + self.config = config self.kobj_queue = kobj_queue self.pipeline = pipeline - self.timeout: float = 0.1 super().__init__() @@ -27,7 +30,7 @@ def run(self): log.info("Started kobj worker") while True: try: - item = self.kobj_queue.q.get(timeout=self.timeout) + item = self.kobj_queue.q.get(timeout=self.config.koi_net.kobj_worker.queue_timeout) try: if item is STOP_WORKER: log.info("Received 'STOP_WORKER' signal, shutting down...") From 4420ef2bf8df41cc4103e657deb166c4d05b087a Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 20 Nov 2025 15:21:32 -0500 Subject: [PATCH 39/53] automated component build order via kahn's algorithm on dependency graph, working on cycle detection and sending useful dev errors --- src/koi_net/assembler.py | 193 ++++++++++++++++++++++++++++++--------- 1 file changed, 151 insertions(+), 42 deletions(-) diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 3a792a8..0bc60ba 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,5 +1,7 @@ +from collections import deque import inspect from enum import StrEnum +from pprint import pp from typing import Any, Protocol, Self from dataclasses import make_dataclass @@ -15,40 +17,58 @@ class CompType(StrEnum): FACTORY = "FACTORY" OBJECT = "OBJECT" -class BuildOrderer(type): - def __new__(cls, name: str, bases: tuple, dct: dict[str]): - """Sets `cls._build_order` from component order in class definition.""" - cls = 
super().__new__(cls, name, bases, dct) - - if "_build_order" not in dct: - components: dict[str, Any] = {} - # adds components from base classes (including cls) - for base in reversed(inspect.getmro(cls)[:-1]): - for k, v in vars(base).items(): - # excludes built in and private attributes - if not k.startswith("_"): - components[k] = v - - # recipe list constructed from names of non-None components - cls._build_order = [ - name for name, _type in components.items() - if _type is not None - ] - - return cls class NodeContainer(Protocol): """Dummy 'shape' for node containers built by assembler.""" entrypoint = EntryPoint -class NodeAssembler(metaclass=BuildOrderer): +class NodeAssembler: + + # Self annotation lying to type checker to reflect typing set in node blueprints def __new__(self) -> Self: """Returns assembled node container.""" - return self._build_node() + + comps = self._collect_comps() + # pp(list(comps.keys())) + adj, comp_types = self._build_deps(comps) + # pp(adj) + # pp(comp_types) + build_order = self._build_order(adj) + # pp(build_order) + components = self._build_comps(build_order, adj, comp_types) + node = self._build_node(components) + + old = list(comps.keys()) + new = build_order + + result = [] + + for idx, item in enumerate(new): + old_idx = old.index(item) + if old_idx == idx: + result.append(f"{idx}. {item}") + else: + result.append(f"{idx}. {item} (moved from {old_idx})") + + # print("\n".join(result)) + + return node + + @classmethod + def _collect_comps(cls): + comps: dict[str, Any] = {} + # adds components from base classes, including cls) + for base in inspect.getmro(cls)[:-1]: + for k, v in vars(base).items(): + # excludes built in, private, and `None` attributes + if k.startswith("_") or v is None: + continue + comps[k] = v + return comps @classmethod - def _build_deps(cls) -> dict[str, tuple[CompType, list[str]]]: + def _build_deps(cls, comps) -> tuple[dict[str, list[str]], dict[str, CompType]]: """Returns dependency graph for components defined in `cls_build_order`. Graph representation is a dict where each key is a component name, @@ -56,29 +76,114 @@ def _build_deps(cls) -> dict[str, tuple[CompType, list[str]]]: of dependency component names. 
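The dependency discovery in `_build_deps` leans on one piece of standard-library behaviour worth spelling out: for a class, `inspect.signature` reports the constructor's parameters with `self` already stripped, so the parameter names can double as component names. A quick illustration with invented classes:

    import inspect

    class QueueSketch:
        pass

    class WorkerSketch:
        def __init__(self, queue: QueueSketch, config: dict):
            self.queue = queue
            self.config = config

    # the parameter names become the component's declared dependencies
    print(list(inspect.signature(WorkerSketch).parameters))   # ['queue', 'config']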
""" + comp_types = {} dep_graph = {} - for comp_name in cls._build_order: + for comp_name in comps: try: comp = getattr(cls, comp_name) except AttributeError: raise Exception(f"Component '{comp_name}' not found in class definition") if not callable(comp): - comp_type = CompType.OBJECT + comp_types[comp_name] = CompType.OBJECT dep_names = [] elif isinstance(comp, type) and issubclass(comp, BaseModel): - comp_type = CompType.OBJECT + comp_types[comp_name] = CompType.OBJECT dep_names = [] else: sig = inspect.signature(comp) - comp_type = CompType.FACTORY + comp_types[comp_name] = CompType.FACTORY dep_names = list(sig.parameters) - dep_graph[comp_name] = (comp_type, dep_names) + dep_graph[comp_name] = dep_names + + return dep_graph, comp_types + + @classmethod + def _find_cycle(cls, adj) -> list[str]: + visited = set() + stack = [] + on_stack = set() + + def dfs(node): + visited.add(node) + stack.append(node) + on_stack.add(node) + + for nxt in adj[node]: + if nxt not in visited: + cycle = dfs(nxt) + if cycle: + return cycle + + elif nxt in on_stack: + idx = stack.index(nxt) + return stack[idx:] + [nxt] + + stack.pop() + on_stack.remove(node) + return None + + for node in adj: + if node not in visited: + cycle = dfs(node) + if cycle: + return cycle + + return None + + @classmethod + def _build_order(cls, adj) -> list[str]: + # adj list: n -> outgoing neighbors + + # reverse adj list: n -> incoming neighbors + r_adj: dict[str, list[str]] = {} + + # computes reverse adjacency list + for node in adj: + r_adj.setdefault(node, []) + for n in adj[node]: + r_adj.setdefault(n, []) + r_adj[n].append(node) + + out_degree: dict[str, int] = { + n: len(neighbors) + for n, neighbors in adj.items() + } + + queue = deque() + for node in out_degree: + if out_degree[node] == 0: + queue.append(node) + + ordered: list[str] = [] + while queue: + n = queue.popleft() + ordered.append(n) + for next_n in r_adj[n]: + out_degree[next_n] -= 1 + if out_degree[next_n] == 0: + queue.append(next_n) + + + + if len(ordered) != len(adj): + cycle_nodes = set(adj.keys()) - set(ordered) + cycle_adj = {} + for n in list(cycle_nodes): + cycle_adj[n] = set(adj[n]) & cycle_nodes + print(n, "->", cycle_adj[n]) + + cycle = cls._find_cycle(cycle_adj) + + print("FOUND CYCLE") + print(" -> ".join(cycle)) - return dep_graph + print(len(ordered), "/", len(adj)) + + return ordered @classmethod def _visualize(cls) -> str: @@ -86,7 +191,7 @@ def _visualize(cls) -> str: dep_graph = cls._build_deps() s = "digraph G {\n" - for node, (_, neighbors) in dep_graph.items(): + for node, neighbors in dep_graph.items(): sub_s = node if neighbors: sub_s += f"-> {', '.join(neighbors)}" @@ -96,32 +201,36 @@ def _visualize(cls) -> str: return s @classmethod - def _build_comps(cls) -> dict[str, Any]: + def _build_comps( + cls, + build_order: list[str], + dep_graph: dict[str, list[str]], + comp_type: dict[str, CompType] + ) -> dict[str, Any]: """Returns assembled components from dependency graph.""" - dep_graph = cls._build_deps() components: dict[str, Any] = {} - for comp_name, (comp_type, dep_names) in dep_graph.items(): + for comp_name in build_order: + # for comp_name, (comp_type, dep_names) in dep_graph.items(): comp = getattr(cls, comp_name, None) - if comp_type == CompType.OBJECT: + if comp_type[comp_name] == CompType.OBJECT: components[comp_name] = comp - elif comp_type == CompType.FACTORY: + elif comp_type[comp_name] == CompType.FACTORY: # builds depedency dict for current component dependencies = {} - for dep_name in dep_names: - if dep_name not in 
components: - raise Exception(f"Couldn't find required component '{dep_name}'") - dependencies[dep_name] = components[dep_name] + for dep in dep_graph[comp_name]: + if dep not in components: + raise Exception(f"Couldn't find required component '{dep}'") + dependencies[dep] = components[dep] components[comp_name] = comp(**dependencies) return components @classmethod - def _build_node(cls) -> NodeContainer: + def _build_node(cls, components: dict[str, Any]) -> NodeContainer: """Returns node container from components.""" - components = cls._build_comps() NodeContainer = make_dataclass( cls_name="NodeContainer", From 74adad48e1cb75c4d84331c705ccc7586b77bdd7 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 20 Nov 2025 16:11:20 -0500 Subject: [PATCH 40/53] version bump -> 1.2.0-beta.6, moved cycle detection to another branch --- pyproject.toml | 2 +- src/koi_net/assembler.py | 73 +++--------------------------- src/koi_net/workers/kobj_worker.py | 3 +- 3 files changed, 9 insertions(+), 69 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 239977f..bb6d799 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" -version = "1.2.0b4" +version = "1.2.0b6" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 0bc60ba..46ba0f7 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -30,34 +30,16 @@ def __new__(self) -> Self: """Returns assembled node container.""" comps = self._collect_comps() - # pp(list(comps.keys())) adj, comp_types = self._build_deps(comps) - # pp(adj) - # pp(comp_types) build_order = self._build_order(adj) - # pp(build_order) components = self._build_comps(build_order, adj, comp_types) node = self._build_node(components) - old = list(comps.keys()) - new = build_order - - result = [] - - for idx, item in enumerate(new): - old_idx = old.index(item) - if old_idx == idx: - result.append(f"{idx}. {item}") - else: - result.append(f"{idx}. {item} (moved from {old_idx})") - - # print("\n".join(result)) - return node @classmethod - def _collect_comps(cls): - comps: dict[str, Any] = {} + def _collect_comps(cls) -> dict[str, Any]: + comps = {} # adds components from base classes, including cls) for base in inspect.getmro(cls)[:-1]: for k, v in vars(base).items(): @@ -68,7 +50,9 @@ def _collect_comps(cls): return comps @classmethod - def _build_deps(cls, comps) -> tuple[dict[str, list[str]], dict[str, CompType]]: + def _build_deps( + cls, comps: dict[str, Any] + ) -> tuple[dict[str, list[str]], dict[str, CompType]]: """Returns dependency graph for components defined in `cls_build_order`. 
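The Kahn-style ordering referred to in the last two commit messages can also be written as a compact standalone function over a plain dependency dict. This toy version is only an illustration of the approach, not the library's implementation; it assumes every dependency also appears as a key, and treats anything left unordered as evidence of a cycle.

    from collections import deque

    def kahn_order(deps: dict[str, list[str]]) -> list[str]:
        # deps maps each component to the components it depends on
        remaining = {name: set(d) for name, d in deps.items()}
        dependents = {name: set() for name in deps}
        for name, d in deps.items():
            for dep in d:
                dependents[dep].add(name)

        queue = deque(name for name, d in remaining.items() if not d)
        order: list[str] = []
        while queue:
            name = queue.popleft()
            order.append(name)
            for child in dependents[name]:
                remaining[child].discard(name)
                if not remaining[child]:
                    queue.append(child)

        if len(order) != len(deps):
            unordered = set(deps) - set(order)
            raise Exception(f"Cycle in dependency graph: {unordered}")
        return order

    print(kahn_order({"config": [], "cache": ["config"], "graph": ["cache"]}))
    # ['config', 'cache', 'graph']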
Graph representation is a dict where each key is a component name, @@ -102,40 +86,7 @@ def _build_deps(cls, comps) -> tuple[dict[str, list[str]], dict[str, CompType]]: return dep_graph, comp_types @classmethod - def _find_cycle(cls, adj) -> list[str]: - visited = set() - stack = [] - on_stack = set() - - def dfs(node): - visited.add(node) - stack.append(node) - on_stack.add(node) - - for nxt in adj[node]: - if nxt not in visited: - cycle = dfs(nxt) - if cycle: - return cycle - - elif nxt in on_stack: - idx = stack.index(nxt) - return stack[idx:] + [nxt] - - stack.pop() - on_stack.remove(node) - return None - - for node in adj: - if node not in visited: - cycle = dfs(node) - if cycle: - return cycle - - return None - - @classmethod - def _build_order(cls, adj) -> list[str]: + def _build_order(cls, adj: dict[str, list[str]]) -> list[str]: # adj list: n -> outgoing neighbors # reverse adj list: n -> incoming neighbors @@ -171,17 +122,7 @@ def _build_order(cls, adj) -> list[str]: if len(ordered) != len(adj): cycle_nodes = set(adj.keys()) - set(ordered) - cycle_adj = {} - for n in list(cycle_nodes): - cycle_adj[n] = set(adj[n]) & cycle_nodes - print(n, "->", cycle_adj[n]) - - cycle = cls._find_cycle(cycle_adj) - - print("FOUND CYCLE") - print(" -> ".join(cycle)) - - print(len(ordered), "/", len(adj)) + raise Exception(f"Found cycle in dependency graph, the following nodes could not be ordered: {cycle_nodes}") return ordered diff --git a/src/koi_net/workers/kobj_worker.py b/src/koi_net/workers/kobj_worker.py index cd7277a..a2a1038 100644 --- a/src/koi_net/workers/kobj_worker.py +++ b/src/koi_net/workers/kobj_worker.py @@ -2,8 +2,7 @@ import traceback import structlog -from koi_net.config.core import NodeConfig - +from ..config.core import NodeConfig from ..processor.pipeline import KnowledgePipeline from ..processor.kobj_queue import KobjQueue from .base import ThreadWorker, STOP_WORKER From 7fd41c208a540d64c0eccbf1f45a8e525f182fed Mon Sep 17 00:00:00 2001 From: lukvmil Date: Thu, 20 Nov 2025 18:17:12 -0500 Subject: [PATCH 41/53] moved start stop logic into worker classes --- src/koi_net/lifecycle.py | 9 ++++----- src/koi_net/workers/base.py | 3 +++ src/koi_net/workers/event_worker.py | 3 +++ src/koi_net/workers/kobj_worker.py | 3 +++ 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index e700eed..81137b0 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -12,7 +12,6 @@ from .workers.kobj_worker import KnowledgeProcessingWorker from .network.event_queue import EventQueue from .workers import EventProcessingWorker -from .workers.base import STOP_WORKER from .config.core import NodeConfig from .processor.kobj_queue import KobjQueue from .network.graph import NetworkGraph @@ -103,8 +102,8 @@ def start(self): self.secure_manager.load_priv_key() - self.kobj_worker.thread.start() - self.event_worker.thread.start() + self.kobj_worker.start() + self.event_worker.start() self.graph.generate() # refresh to reflect changes (if any) in config.yaml node profile @@ -131,5 +130,5 @@ def start(self): def stop(self): """Stops a node, send stop signals to workers.""" - self.kobj_queue.q.put(STOP_WORKER) - self.event_queue.q.put(STOP_WORKER) \ No newline at end of file + self.kobj_worker.stop() + self.event_worker.stop() \ No newline at end of file diff --git a/src/koi_net/workers/base.py b/src/koi_net/workers/base.py index fa8d227..c42f92b 100644 --- a/src/koi_net/workers/base.py +++ b/src/koi_net/workers/base.py @@ -15,6 +15,9 
@@ class ThreadWorker: def __init__(self): self.thread = threading.Thread(target=self.run) + def start(self): + self.thread.start() + def run(self): """Processing loop for thread.""" pass \ No newline at end of file diff --git a/src/koi_net/workers/event_worker.py b/src/koi_net/workers/event_worker.py index 17ec4db..e8abdaf 100644 --- a/src/koi_net/workers/event_worker.py +++ b/src/koi_net/workers/event_worker.py @@ -46,6 +46,9 @@ def flush_and_broadcast(self, target: KoiNetNode): self.request_handler.broadcast_events(target, events=events) except Exception as e: traceback.print_exc() + + def stop(self): + self.event_queue.q.put(STOP_WORKER) def run(self): log.info("Started event worker") diff --git a/src/koi_net/workers/kobj_worker.py b/src/koi_net/workers/kobj_worker.py index a2a1038..d7cdbde 100644 --- a/src/koi_net/workers/kobj_worker.py +++ b/src/koi_net/workers/kobj_worker.py @@ -25,6 +25,9 @@ def __init__( super().__init__() + def stop(self): + self.kobj_queue.q.put(STOP_WORKER) + def run(self): log.info("Started kobj worker") while True: From 38624ce1f00fdde4c58b206c1476caf698074df7 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 2 Dec 2025 11:53:49 -0500 Subject: [PATCH 42/53] factoring out lifecycle component, instead the node container object will automatically run `start()` and `stop()` functions in build and reverse build order for components which provide them. config now loads at init time, not at startup. entrypoint components `run()` method treated as special main thread function by container. --- examples/coordinator.py | 2 +- examples/partial.py | 2 +- src/koi_net/assembler.py | 73 ++++++++++++++++++++++--------- src/koi_net/catcher_upper.py | 36 +++++++++++++++ src/koi_net/config/loader.py | 8 ++++ src/koi_net/core.py | 6 ++- src/koi_net/entrypoints/poller.py | 20 ++++----- src/koi_net/entrypoints/server.py | 24 +++++----- src/koi_net/lifecycle.py | 17 +++---- src/koi_net/network/graph.py | 3 ++ src/koi_net/secure_manager.py | 3 ++ src/koi_net/self_starter.py | 27 ++++++++++++ 12 files changed, 168 insertions(+), 53 deletions(-) create mode 100644 src/koi_net/catcher_upper.py create mode 100644 src/koi_net/self_starter.py diff --git a/examples/coordinator.py b/examples/coordinator.py index 7e1cab9..a1cd33a 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -66,4 +66,4 @@ class CoordinatorNode(FullNode): if __name__ == "__main__": node = CoordinatorNode() - node.entrypoint.run() \ No newline at end of file + # node.entrypoint.run() \ No newline at end of file diff --git a/examples/partial.py b/examples/partial.py index 3cfe42d..6d06232 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -13,4 +13,4 @@ class MyPartialNode(PartialNode): if __name__ == "__main__": node = MyPartialNode() - node.entrypoint.run() \ No newline at end of file + # node.entrypoint.run() \ No newline at end of file diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 46ba0f7..5c0384c 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -8,8 +8,6 @@ import structlog from pydantic import BaseModel -from .entrypoints.base import EntryPoint - log = structlog.stdlib.get_logger() @@ -18,22 +16,47 @@ class CompType(StrEnum): OBJECT = "OBJECT" -class NodeContainer(Protocol): +class BaseNodeContainer(Protocol): """Dummy 'shape' for node containers built by assembler.""" - entrypoint = EntryPoint - -class NodeAssembler: + _build_order: list[str] + entrypoint: Any + def run(self): + try: + self.start() + self.entrypoint.run() + except 
KeyboardInterrupt: + ... + finally: + self.stop() + def start(self): + for comp_name in self._build_order: + # print(comp_name) + comp = getattr(self, comp_name) + if getattr(comp, "start", None): + print(f"Starting {comp_name}...") + comp.start() + + def stop(self): + for comp_name in reversed(self._build_order): + comp = getattr(self, comp_name) + if getattr(comp, "stop", None): + print(f"Stopping {comp_name}...") + comp.stop() + +class NodeAssembler: # Self annotation lying to type checker to reflect typing set in node blueprints def __new__(self) -> Self: """Returns assembled node container.""" comps = self._collect_comps() adj, comp_types = self._build_deps(comps) - build_order = self._build_order(adj) + build_order, startup_order = self._build_order(adj) + [print(f"{i}: {comp}") for i, comp in enumerate(build_order)] + print(startup_order) components = self._build_comps(build_order, adj, comp_types) - node = self._build_node(components) + node = self._build_node(components, build_order) return node @@ -109,22 +132,26 @@ def _build_order(cls, adj: dict[str, list[str]]) -> list[str]: if out_degree[node] == 0: queue.append(node) - ordered: list[str] = [] + build_order: list[str] = [] + startup_order: list[str] = [] while queue: n = queue.popleft() - ordered.append(n) + build_order.append(n) + comp = getattr(cls, n) + if getattr(comp, "start", None): + startup_order.append(n) + for next_n in r_adj[n]: out_degree[next_n] -= 1 if out_degree[next_n] == 0: queue.append(next_n) - - - if len(ordered) != len(adj): - cycle_nodes = set(adj.keys()) - set(ordered) + print(len(build_order), len(adj)) + if len(build_order) != len(adj): + cycle_nodes = set(adj.keys()) - set(build_order) raise Exception(f"Found cycle in dependency graph, the following nodes could not be ordered: {cycle_nodes}") - return ordered + return build_order, startup_order @classmethod def _visualize(cls) -> str: @@ -170,17 +197,23 @@ def _build_comps( return components @classmethod - def _build_node(cls, components: dict[str, Any]) -> NodeContainer: + def _build_node( + cls, + components: dict[str, Any], + build_order: list[str] + ) -> BaseNodeContainer: """Returns node container from components.""" NodeContainer = make_dataclass( cls_name="NodeContainer", - fields=[ - (name, type(component)) + fields=(("_build_order", build_order),) + tuple( + (name, type(component)) for name, component in components.items() - ], + ), + bases=(BaseNodeContainer,), frozen=True ) - return NodeContainer(**components) + return NodeContainer(_build_order=build_order, **components) + \ No newline at end of file diff --git a/src/koi_net/catcher_upper.py b/src/koi_net/catcher_upper.py new file mode 100644 index 0000000..91d8b35 --- /dev/null +++ b/src/koi_net/catcher_upper.py @@ -0,0 +1,36 @@ +import structlog +from rid_lib.types import KoiNetNode +from koi_net.config.core import NodeConfig +from koi_net.handshaker import Handshaker +from koi_net.network.graph import NetworkGraph +from koi_net.sync_manager import SyncManager + +log = structlog.stdlib.get_logger() + + +class CatcherUpper: + def __init__( + self, + graph: NetworkGraph, + sync_manager: SyncManager, + handshaker: Handshaker, + config: NodeConfig + ): + self.graph = graph + self.sync_manager = sync_manager + self.handshaker = handshaker + self.config = config + + def start(self): + node_providers = self.graph.get_neighbors( + direction="in", + allowed_type=KoiNetNode + ) + + if node_providers: + log.debug(f"Catching up with `orn:koi-net.node` providers: {node_providers}") + 
self.sync_manager.catch_up_with(node_providers, [KoiNetNode]) + + elif self.config.koi_net.first_contact.rid: + log.debug(f"No edges with `orn:koi-net.node` providers, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") + self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) \ No newline at end of file diff --git a/src/koi_net/config/loader.py b/src/koi_net/config/loader.py index b8d3d18..3801808 100644 --- a/src/koi_net/config/loader.py +++ b/src/koi_net/config/loader.py @@ -20,6 +20,14 @@ def __init__( ): self.config_schema = config_schema self.proxy = config + + # this is a special case to allow config state dependent components + # to initialize without a "lazy initialization" approach, in general + # components SHOULD NOT execute code in their init phase + self.load_from_yaml() + + def start(self): + self.save_to_yaml() def load_from_yaml(self): """Loads config from YAML file, or generates it if missing.""" diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 419a786..6505fbd 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -22,6 +22,8 @@ from .processor.kobj_queue import KobjQueue from .processor.handler import KnowledgeHandler from .secure_manager import SecureManager +from .catcher_upper import CatcherUpper +from .self_starter import SelfStart from .entrypoints import NodeServer, NodePoller from .processor.knowledge_handlers import ( basic_manifest_handler, @@ -67,7 +69,9 @@ class BaseNode(NodeAssembler): pipeline: KnowledgePipeline = KnowledgePipeline kobj_worker: KnowledgeProcessingWorker = KnowledgeProcessingWorker event_worker: EventProcessingWorker = EventProcessingWorker - lifecycle: NodeLifecycle = NodeLifecycle + catcher_upper: CatcherUpper = CatcherUpper + self_start: SelfStart = SelfStart + # lifecycle: NodeLifecycle = NodeLifecycle class FullNode(BaseNode): entrypoint: NodeServer = NodeServer diff --git a/src/koi_net/entrypoints/poller.py b/src/koi_net/entrypoints/poller.py index 4d8d389..ae256a2 100644 --- a/src/koi_net/entrypoints/poller.py +++ b/src/koi_net/entrypoints/poller.py @@ -21,12 +21,12 @@ class NodePoller(EntryPoint): def __init__( self, config: PartialNodeConfig, - lifecycle: NodeLifecycle, + # lifecycle: NodeLifecycle, kobj_queue: KobjQueue, resolver: NetworkResolver, ): self.kobj_queue = kobj_queue - self.lifecycle = lifecycle + # self.lifecycle = lifecycle self.resolver = resolver self.config = config @@ -38,11 +38,11 @@ def poll(self): def run(self): """Runs polling event loop.""" - with self.lifecycle.run(): - while True: - start_time = time.time() - self.poll() - elapsed = time.time() - start_time - sleep_time = self.config.poller.polling_interval - elapsed - if sleep_time > 0: - time.sleep(sleep_time) \ No newline at end of file + # with self.lifecycle.run(): + while True: + start_time = time.time() + self.poll() + elapsed = time.time() - start_time + sleep_time = self.config.poller.polling_interval - elapsed + if sleep_time > 0: + time.sleep(sleep_time) \ No newline at end of file diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index 9d50fc4..c2ee442 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -26,11 +26,11 @@ class NodeServer(EntryPoint): def __init__( self, config: FullNodeConfig, - lifecycle: NodeLifecycle, + # lifecycle: NodeLifecycle, response_handler: ResponseHandler, ): self.config = config - self.lifecycle = lifecycle + # self.lifecycle = lifecycle self.response_handler = response_handler def 
build_endpoints(self, router: APIRouter): @@ -80,13 +80,13 @@ def protocol_error_handler(self, request, exc: ProtocolError): def run(self): """Starts FastAPI server and event handler.""" - with self.lifecycle.run(): - self.build_app() - - uvicorn.run( - app=self.app, - host=self.config.server.host, - port=self.config.server.port, - log_config=None, - access_log=False - ) \ No newline at end of file + # with self.lifecycle.run(): + self.build_app() + + uvicorn.run( + app=self.app, + host=self.config.server.host, + port=self.config.server.port, + log_config=None, + access_log=False + ) \ No newline at end of file diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py index 81137b0..93382e7 100644 --- a/src/koi_net/lifecycle.py +++ b/src/koi_net/lifecycle.py @@ -97,14 +97,14 @@ def start(self): """ # attempt to load config from yaml, and write back changes (if any) - self.config_loader.load_from_yaml() - self.config_loader.save_to_yaml() + # self.config_loader.load_from_yaml() + # self.config_loader.save_to_yaml() - self.secure_manager.load_priv_key() + # self.secure_manager.load_priv_key() - self.kobj_worker.start() - self.event_worker.start() - self.graph.generate() + # self.kobj_worker.start() + # self.event_worker.start() + # self.graph.generate() # refresh to reflect changes (if any) in config.yaml node profile self.kobj_queue.push(bundle=Bundle.generate( @@ -112,6 +112,7 @@ def start(self): contents=self.identity.profile.model_dump() )) + # why am I doing this? waiting for self bundle to process before continuing in the process self.kobj_queue.q.join() node_providers = self.graph.get_neighbors( @@ -130,5 +131,5 @@ def start(self): def stop(self): """Stops a node, send stop signals to workers.""" - self.kobj_worker.stop() - self.event_worker.stop() \ No newline at end of file + # self.kobj_worker.stop() + # self.event_worker.stop() \ No newline at end of file diff --git a/src/koi_net/network/graph.py b/src/koi_net/network/graph.py index d4e5325..3d73531 100644 --- a/src/koi_net/network/graph.py +++ b/src/koi_net/network/graph.py @@ -21,6 +21,9 @@ def __init__(self, cache: Cache, identity: NodeIdentity): self.cache = cache self.dg = nx.DiGraph() self.identity = identity + + def start(self): + self.generate() def generate(self): """Generates directed graph from cached KOI nodes and edges.""" diff --git a/src/koi_net/secure_manager.py b/src/koi_net/secure_manager.py index 46b8e83..fb49c20 100644 --- a/src/koi_net/secure_manager.py +++ b/src/koi_net/secure_manager.py @@ -38,6 +38,9 @@ def __init__( self.cache = cache self.config = config + def start(self): + self.load_priv_key() + def load_priv_key(self) -> PrivateKey: """Loads private key from PEM file path in config.""" diff --git a/src/koi_net/self_starter.py b/src/koi_net/self_starter.py new file mode 100644 index 0000000..43c4485 --- /dev/null +++ b/src/koi_net/self_starter.py @@ -0,0 +1,27 @@ +from rid_lib.ext import Bundle +from koi_net.identity import NodeIdentity +from koi_net.processor.kobj_queue import KobjQueue + + +class SelfStart: + def __init__( + self, + kobj_queue: KobjQueue, + identity: NodeIdentity + ): + self.kobj_queue = kobj_queue + self.identity = identity + + def start(self): + print(self.identity.rid) + print(self.identity.profile) + + self_bundle = Bundle.generate( + rid=self.identity.rid, + contents=self.identity.profile.model_dump() + ) + + self.kobj_queue.push(bundle=self_bundle) + + # will freeze if called before kobj worker is started: + # self.kobj_queue.q.join() \ No newline at end of file From 
7f9f76db6f5f6df2e769189e991c540da7150f5a Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 2 Dec 2025 18:36:19 -0500 Subject: [PATCH 43/53] fully factored out lifecycle component, refactored assembly mechanism: new assembly artifact class does most of the assembly work, built once for each node assembly; node assembly builds components and passes them along with the artifact into a new node container class. node container class has start, stop, and run functions. they will automatically turn components on in order and then run the entrypoint. experimenting with explicit overrides, cache schema has a @component.static decorator now. added a consts module --- examples/partial.py | 2 +- src/koi_net/artifact.py | 163 ++++++++++++++++++++++ src/koi_net/assembler.py | 220 ++++-------------------------- src/koi_net/assembler_consts.py | 5 + src/koi_net/component.py | 6 + src/koi_net/config/core.py | 4 + src/koi_net/container.py | 39 ++++++ src/koi_net/core.py | 2 - src/koi_net/entrypoints/poller.py | 5 - src/koi_net/entrypoints/server.py | 6 - src/koi_net/lifecycle.py | 135 ------------------ 11 files changed, 247 insertions(+), 340 deletions(-) create mode 100644 src/koi_net/artifact.py create mode 100644 src/koi_net/assembler_consts.py create mode 100644 src/koi_net/component.py create mode 100644 src/koi_net/container.py delete mode 100644 src/koi_net/lifecycle.py diff --git a/examples/partial.py b/examples/partial.py index 6d06232..4f0436b 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -13,4 +13,4 @@ class MyPartialNode(PartialNode): if __name__ == "__main__": node = MyPartialNode() - # node.entrypoint.run() \ No newline at end of file + node.run() \ No newline at end of file diff --git a/src/koi_net/artifact.py b/src/koi_net/artifact.py new file mode 100644 index 0000000..3117cab --- /dev/null +++ b/src/koi_net/artifact.py @@ -0,0 +1,163 @@ +import inspect +from collections import deque +from enum import StrEnum +from typing import Any +from pydantic import BaseModel + +from koi_net.assembler_consts import COMP_TYPE_OVERRIDE, START_FUNC_NAME, START_ORDER_OVERRIDE, STOP_FUNC_NAME, STOP_ORDER_OVERRIDE + + +class CompType(StrEnum): + FACTORY = "FACTORY" + OBJECT = "OBJECT" + +class AssemblyArtifact: + assembler: Any + comp_dict: dict[str, Any] + dep_graph: dict[str, list[str]] + comp_types: dict[str, CompType] + init_order: list[str] + start_order: list[str] + stop_order: list[str] + + def __init__(self, assembler): + self.assembler = assembler + + def collect_comps(self): + """Collects components into `comp_dict` from class definition.""" + + self.comp_dict = {} + # adds components from class and all base classes. skips `type`, and runs in reverse so that sub classes override super class values + for base in reversed(inspect.getmro(self.assembler)[:-1]): + for k, v in vars(base).items(): + # excludes built in, private, and `None` attributes + if k.startswith("_") or v is None: + continue + + self.comp_dict[k] = v + + def build_dependencies(self): + """Builds dependency graph and component type map. + + Graph representation is an adjacency list: each key is a component + name, and the value is a tuple containing a list of dependency + component names. 
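The container behaviour this commit message describes (start components in build order, hand the main thread to the entrypoint, stop components in reverse order even on interrupt) can be shown with a toy container. Every class below is invented for the example; the real container is assembled dynamically from the blueprint.

    class EventWorkerSketch:
        def start(self): print("worker started")
        def stop(self): print("worker stopped")

    class GraphSketch:
        def start(self): print("graph generated")

    class EntrypointSketch:
        def run(self): print("running entrypoint on main thread")

    class ContainerSketch:
        def __init__(self, **components):
            self._order = list(components)          # build/init order
            self.__dict__.update(components)

        def run(self):
            try:
                for name in self._order:            # start in build order
                    comp = getattr(self, name)
                    if hasattr(comp, "start"):
                        comp.start()
                self.entrypoint.run()               # blocking entrypoint
            except KeyboardInterrupt:
                pass
            finally:
                for name in reversed(self._order):  # stop in reverse order
                    comp = getattr(self, name)
                    if hasattr(comp, "stop"):
                        comp.stop()

    ContainerSketch(worker=EventWorkerSketch(), graph=GraphSketch(),
                    entrypoint=EntrypointSketch()).run()
    # worker started, graph generated, running entrypoint on main thread, worker stopped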
+ """ + + self.comp_types = {} + self.dep_graph = {} + for comp_name, comp in self.comp_dict.items(): + explicit_type = getattr(comp, COMP_TYPE_OVERRIDE, None) + + dep_names = [] + if explicit_type: + self.comp_types[comp_name] = explicit_type + + elif not callable(comp): + self.comp_types[comp_name] = CompType.OBJECT + + elif isinstance(comp, type) and issubclass(comp, BaseModel): + self.comp_types[comp_name] = CompType.OBJECT + + else: + sig = inspect.signature(comp) + self.comp_types[comp_name] = CompType.FACTORY + dep_names = list(sig.parameters) + + self.dep_graph[comp_name] = dep_names + + [print(f"{i}: {comp_name} -> {deps}") for i, (comp_name, deps) in enumerate(self.dep_graph.items())] + + def build_init_order(self): + # adj list: n -> outgoing neighbors + + # reverse adj list: n -> incoming neighbors + r_adj: dict[str, list[str]] = {} + + # computes reverse adjacency list + for node in self.dep_graph: + r_adj.setdefault(node, []) + for n in self.dep_graph[node]: + r_adj.setdefault(n, []) + r_adj[n].append(node) + + out_degree: dict[str, int] = { + n: len(neighbors) + for n, neighbors in self.dep_graph.items() + } + + queue = deque() + for node in out_degree: + if out_degree[node] == 0: + queue.append(node) + + self.init_order = [] + while queue: + n = queue.popleft() + self.init_order.append(n) + + for next_n in r_adj[n]: + out_degree[next_n] -= 1 + if out_degree[next_n] == 0: + queue.append(next_n) + + if len(self.init_order) != len(self.dep_graph): + cycle_nodes = set(self.dep_graph.keys()) - set(self.init_order) + raise Exception(f"Found cycle in dependency graph, the following nodes could not be ordered: {cycle_nodes}") + + print("\ninit order") + [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.init_order)] + + def build_start_order(self): + start_order_override = getattr( + self.assembler, START_ORDER_OVERRIDE, None) + + if start_order_override: + self.start_order = start_order_override + else: + self.start_order = [] + for comp_name in self.init_order: + comp = self.comp_dict[comp_name] + if getattr(comp, START_FUNC_NAME, None): + self.start_order.append(comp_name) + + print("\nstart order") + [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.start_order)] + + def build_stop_order(self): + stop_order_override = getattr( + self.assembler, STOP_ORDER_OVERRIDE, None) + + if stop_order_override: + self.stop_order = stop_order_override + else: + self.stop_order = [] + for comp_name in reversed(self.init_order): + comp = self.comp_dict[comp_name] + if getattr(comp, STOP_FUNC_NAME, None): + self.stop_order.append(comp_name) + + print("\nstop order") + [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.stop_order)] + + + def visualize(self) -> str: + """Returns representation of dependency graph in Graphviz DOT language.""" + + s = "digraph G {\n" + for node, neighbors in self.dep_graph.items(): + sub_s = node + if neighbors: + sub_s += f"-> {', '.join(neighbors)}" + sub_s = sub_s.replace("graph", "graph_") + ";" + s += " " * 4 + sub_s + "\n" + s += "}" + self.graphviz = s + + def build(self): + self.collect_comps() + self.build_dependencies() + self.build_init_order() + self.build_start_order() + self.build_stop_order() + self.visualize() diff --git a/src/koi_net/assembler.py b/src/koi_net/assembler.py index 5c0384c..00690b1 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/assembler.py @@ -1,219 +1,57 @@ -from collections import deque -import inspect -from enum import StrEnum -from pprint import pp -from typing import Any, Protocol, 
Self -from dataclasses import make_dataclass + +from typing import Any, Self import structlog -from pydantic import BaseModel -log = structlog.stdlib.get_logger() +from .artifact import AssemblyArtifact, CompType +from .container import NodeContainer +log = structlog.stdlib.get_logger() -class CompType(StrEnum): - FACTORY = "FACTORY" - OBJECT = "OBJECT" -class BaseNodeContainer(Protocol): - """Dummy 'shape' for node containers built by assembler.""" - _build_order: list[str] - entrypoint: Any +class NodeAssembler: + _artifact: AssemblyArtifact = None - def run(self): - try: - self.start() - self.entrypoint.run() - except KeyboardInterrupt: - ... - finally: - self.stop() + _start_order: list[str] + _stop_order: list[str] - def start(self): - for comp_name in self._build_order: - # print(comp_name) - comp = getattr(self, comp_name) - if getattr(comp, "start", None): - print(f"Starting {comp_name}...") - comp.start() - - def stop(self): - for comp_name in reversed(self._build_order): - comp = getattr(self, comp_name) - if getattr(comp, "stop", None): - print(f"Stopping {comp_name}...") - comp.stop() - -class NodeAssembler: - # Self annotation lying to type checker to reflect typing set in node blueprints - def __new__(self) -> Self: + # annotation hack to show the components and container methods + def __new__(cls) -> Self | NodeContainer: """Returns assembled node container.""" - comps = self._collect_comps() - adj, comp_types = self._build_deps(comps) - build_order, startup_order = self._build_order(adj) - [print(f"{i}: {comp}") for i, comp in enumerate(build_order)] - print(startup_order) - components = self._build_comps(build_order, adj, comp_types) - node = self._build_node(components, build_order) + # builds assembly artifact if it doesn't exist + if not cls._artifact: + cls._artifact = AssemblyArtifact(cls) + cls._artifact.build() - return node - - @classmethod - def _collect_comps(cls) -> dict[str, Any]: - comps = {} - # adds components from base classes, including cls) - for base in inspect.getmro(cls)[:-1]: - for k, v in vars(base).items(): - # excludes built in, private, and `None` attributes - if k.startswith("_") or v is None: - continue - comps[k] = v - return comps - - @classmethod - def _build_deps( - cls, comps: dict[str, Any] - ) -> tuple[dict[str, list[str]], dict[str, CompType]]: - """Returns dependency graph for components defined in `cls_build_order`. - - Graph representation is a dict where each key is a component name, - and the value is tuple containing the component type, and a list - of dependency component names. 
- """ + components = cls._build_components(cls._artifact) - comp_types = {} - dep_graph = {} - for comp_name in comps: - try: - comp = getattr(cls, comp_name) - except AttributeError: - raise Exception(f"Component '{comp_name}' not found in class definition") - - if not callable(comp): - comp_types[comp_name] = CompType.OBJECT - dep_names = [] - - elif isinstance(comp, type) and issubclass(comp, BaseModel): - comp_types[comp_name] = CompType.OBJECT - dep_names = [] - - else: - sig = inspect.signature(comp) - comp_types[comp_name] = CompType.FACTORY - dep_names = list(sig.parameters) - - dep_graph[comp_name] = dep_names - - return dep_graph, comp_types + return NodeContainer(cls._artifact, **components) - @classmethod - def _build_order(cls, adj: dict[str, list[str]]) -> list[str]: - # adj list: n -> outgoing neighbors - - # reverse adj list: n -> incoming neighbors - r_adj: dict[str, list[str]] = {} - - # computes reverse adjacency list - for node in adj: - r_adj.setdefault(node, []) - for n in adj[node]: - r_adj.setdefault(n, []) - r_adj[n].append(node) - - out_degree: dict[str, int] = { - n: len(neighbors) - for n, neighbors in adj.items() - } - - queue = deque() - for node in out_degree: - if out_degree[node] == 0: - queue.append(node) - - build_order: list[str] = [] - startup_order: list[str] = [] - while queue: - n = queue.popleft() - build_order.append(n) - comp = getattr(cls, n) - if getattr(comp, "start", None): - startup_order.append(n) - - for next_n in r_adj[n]: - out_degree[next_n] -= 1 - if out_degree[next_n] == 0: - queue.append(next_n) - - print(len(build_order), len(adj)) - if len(build_order) != len(adj): - cycle_nodes = set(adj.keys()) - set(build_order) - raise Exception(f"Found cycle in dependency graph, the following nodes could not be ordered: {cycle_nodes}") - - return build_order, startup_order - - @classmethod - def _visualize(cls) -> str: - """Returns representation of dependency graph in Graphviz DOT language.""" - dep_graph = cls._build_deps() - - s = "digraph G {\n" - for node, neighbors in dep_graph.items(): - sub_s = node - if neighbors: - sub_s += f"-> {', '.join(neighbors)}" - sub_s = sub_s.replace("graph", "graph_") + ";" - s += " " * 4 + sub_s + "\n" - s += "}" - return s - - @classmethod - def _build_comps( - cls, - build_order: list[str], - dep_graph: dict[str, list[str]], - comp_type: dict[str, CompType] - ) -> dict[str, Any]: - """Returns assembled components from dependency graph.""" + @staticmethod + def _build_components(artifact: AssemblyArtifact): + """Returns assembled components as a dict.""" + print("\nbuilding components") components: dict[str, Any] = {} - for comp_name in build_order: + for comp_name in artifact.init_order: # for comp_name, (comp_type, dep_names) in dep_graph.items(): - comp = getattr(cls, comp_name, None) + comp = artifact.comp_dict[comp_name] + comp_type = artifact.comp_types[comp_name] + + print(comp, comp_type) - if comp_type[comp_name] == CompType.OBJECT: + if comp_type == CompType.OBJECT: components[comp_name] = comp - elif comp_type[comp_name] == CompType.FACTORY: + elif comp_type == CompType.FACTORY: # builds depedency dict for current component dependencies = {} - for dep in dep_graph[comp_name]: + for dep in artifact.dep_graph[comp_name]: if dep not in components: raise Exception(f"Couldn't find required component '{dep}'") dependencies[dep] = components[dep] components[comp_name] = comp(**dependencies) return components - - @classmethod - def _build_node( - cls, - components: dict[str, Any], - build_order: 
list[str] - ) -> BaseNodeContainer: - """Returns node container from components.""" - - NodeContainer = make_dataclass( - cls_name="NodeContainer", - fields=(("_build_order", build_order),) + tuple( - (name, type(component)) - for name, component - in components.items() - ), - bases=(BaseNodeContainer,), - frozen=True - ) - - return NodeContainer(_build_order=build_order, **components) - \ No newline at end of file diff --git a/src/koi_net/assembler_consts.py b/src/koi_net/assembler_consts.py new file mode 100644 index 0000000..86488f3 --- /dev/null +++ b/src/koi_net/assembler_consts.py @@ -0,0 +1,5 @@ +START_FUNC_NAME = "start" +STOP_FUNC_NAME = "stop" +START_ORDER_OVERRIDE = "_start_order" +STOP_ORDER_OVERRIDE = "_stop_order" +COMP_TYPE_OVERRIDE = "_comp_type" \ No newline at end of file diff --git a/src/koi_net/component.py b/src/koi_net/component.py new file mode 100644 index 0000000..ab29877 --- /dev/null +++ b/src/koi_net/component.py @@ -0,0 +1,6 @@ +from .assembler_consts import COMP_TYPE_OVERRIDE +from .artifact import CompType + +def static(cls): + setattr(cls, COMP_TYPE_OVERRIDE, CompType.OBJECT) + return cls \ No newline at end of file diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index d95747b..c3ed5e6 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -5,6 +5,7 @@ from rid_lib.types import KoiNetNode import structlog +from koi_net import component from koi_net.protocol.secure import PrivateKey from ..protocol.node import NodeProfile @@ -65,6 +66,9 @@ def __getattribute__(self, name): return env_val return value +# marking this component as static, classes are implicitly treated as +# factories, but this needs to be passed as is +@component.static class NodeConfig(BaseModel): """Base node config class, intended to be extended.""" diff --git a/src/koi_net/container.py b/src/koi_net/container.py new file mode 100644 index 0000000..9693d1b --- /dev/null +++ b/src/koi_net/container.py @@ -0,0 +1,39 @@ +import structlog + +from .entrypoints.base import EntryPoint +from .artifact import AssemblyArtifact + +log = structlog.stdlib.get_logger() + + +class NodeContainer: + """Dummy 'shape' for node containers built by assembler.""" + _artifact: AssemblyArtifact + entrypoint: EntryPoint + + def __init__(self, artifact, **kwargs): + self._artifact = artifact + + for name, comp in kwargs.items(): + setattr(self, name, comp) + + def run(self): + try: + self.start() + self.entrypoint.run() + except KeyboardInterrupt: + log.info("Keyboard interrupt!") + finally: + self.stop() + + def start(self): + log.info("Starting node...") + for comp_name in self._artifact.start_order: + comp = getattr(self, comp_name) + comp.start() + + def stop(self): + log.info("Stopping node...") + for comp_name in self._artifact.stop_order: + comp = getattr(self, comp_name) + comp.stop() \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 6505fbd..1f12c66 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -10,7 +10,6 @@ from .sync_manager import SyncManager from .identity import NodeIdentity from .workers import KnowledgeProcessingWorker, EventProcessingWorker -from .lifecycle import NodeLifecycle from .network.error_handler import ErrorHandler from .network.event_queue import EventQueue from .network.graph import NetworkGraph @@ -71,7 +70,6 @@ class BaseNode(NodeAssembler): event_worker: EventProcessingWorker = EventProcessingWorker catcher_upper: CatcherUpper = CatcherUpper self_start: SelfStart = SelfStart - # 
lifecycle: NodeLifecycle = NodeLifecycle class FullNode(BaseNode): entrypoint: NodeServer = NodeServer diff --git a/src/koi_net/entrypoints/poller.py b/src/koi_net/entrypoints/poller.py index ae256a2..dd86c03 100644 --- a/src/koi_net/entrypoints/poller.py +++ b/src/koi_net/entrypoints/poller.py @@ -4,7 +4,6 @@ from .base import EntryPoint from ..processor.kobj_queue import KobjQueue -from ..lifecycle import NodeLifecycle from ..network.resolver import NetworkResolver from ..config.partial_node import PartialNodeConfig @@ -14,19 +13,16 @@ class NodePoller(EntryPoint): """Entry point for partial nodes, manages polling event loop.""" kobj_queue: KobjQueue - lifecycle: NodeLifecycle resolver: NetworkResolver config: PartialNodeConfig def __init__( self, config: PartialNodeConfig, - # lifecycle: NodeLifecycle, kobj_queue: KobjQueue, resolver: NetworkResolver, ): self.kobj_queue = kobj_queue - # self.lifecycle = lifecycle self.resolver = resolver self.config = config @@ -38,7 +34,6 @@ def poll(self): def run(self): """Runs polling event loop.""" - # with self.lifecycle.run(): while True: start_time = time.time() self.poll() diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index c2ee442..78fbacb 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -1,6 +1,5 @@ import structlog import uvicorn -from contextlib import asynccontextmanager from fastapi import FastAPI, APIRouter from fastapi.responses import JSONResponse @@ -9,7 +8,6 @@ from ..protocol.model_map import API_MODEL_MAP from ..protocol.api_models import ErrorResponse from ..protocol.errors import ProtocolError -from ..lifecycle import NodeLifecycle from ..config.full_node import FullNodeConfig log = structlog.stdlib.get_logger() @@ -18,7 +16,6 @@ class NodeServer(EntryPoint): """Entry point for full nodes, manages FastAPI server.""" config: FullNodeConfig - lifecycle: NodeLifecycle response_handler: ResponseHandler app: FastAPI router: APIRouter @@ -26,11 +23,9 @@ class NodeServer(EntryPoint): def __init__( self, config: FullNodeConfig, - # lifecycle: NodeLifecycle, response_handler: ResponseHandler, ): self.config = config - # self.lifecycle = lifecycle self.response_handler = response_handler def build_endpoints(self, router: APIRouter): @@ -80,7 +75,6 @@ def protocol_error_handler(self, request, exc: ProtocolError): def run(self): """Starts FastAPI server and event handler.""" - # with self.lifecycle.run(): self.build_app() uvicorn.run( diff --git a/src/koi_net/lifecycle.py b/src/koi_net/lifecycle.py deleted file mode 100644 index 93382e7..0000000 --- a/src/koi_net/lifecycle.py +++ /dev/null @@ -1,135 +0,0 @@ -import structlog -from contextlib import contextmanager, asynccontextmanager - -from rid_lib.ext import Bundle -from rid_lib.types import KoiNetNode - -from koi_net.config.loader import ConfigLoader -from koi_net.secure_manager import SecureManager - -from .sync_manager import SyncManager -from .handshaker import Handshaker -from .workers.kobj_worker import KnowledgeProcessingWorker -from .network.event_queue import EventQueue -from .workers import EventProcessingWorker -from .config.core import NodeConfig -from .processor.kobj_queue import KobjQueue -from .network.graph import NetworkGraph -from .identity import NodeIdentity - -log = structlog.stdlib.get_logger() - - -class NodeLifecycle: - """Manages node startup and shutdown processes.""" - - config: NodeConfig - config_loader: ConfigLoader - identity: NodeIdentity - graph: NetworkGraph - kobj_queue: 
KobjQueue - kobj_worker: KnowledgeProcessingWorker - event_queue: EventQueue - event_worker: EventProcessingWorker - handshaker: Handshaker - sync_manager: SyncManager - secure_manager: SecureManager - - def __init__( - self, - config: NodeConfig, - config_loader: ConfigLoader, - identity: NodeIdentity, - graph: NetworkGraph, - kobj_queue: KobjQueue, - kobj_worker: KnowledgeProcessingWorker, - event_queue: EventQueue, - event_worker: EventProcessingWorker, - handshaker: Handshaker, - sync_manager: SyncManager, - secure_manager: SecureManager - ): - self.config = config - self.config_loader = config_loader - self.identity = identity - self.graph = graph - self.kobj_queue = kobj_queue - self.kobj_worker = kobj_worker - self.event_queue = event_queue - self.event_worker = event_worker - self.handshaker = handshaker - self.sync_manager = sync_manager - self.secure_manager = secure_manager - - @contextmanager - def run(self): - """Synchronous context manager for node startup and shutdown.""" - try: - log.info("Starting node lifecycle...") - self.start() - yield - except KeyboardInterrupt: - log.info("Keyboard interrupt!") - finally: - log.info("Stopping node lifecycle...") - self.stop() - - @asynccontextmanager - async def async_run(self): - """Asynchronous context manager for node startup and shutdown.""" - try: - log.info("Starting async node lifecycle...") - self.start() - yield - except KeyboardInterrupt: - log.info("Keyboard interrupt!") - finally: - log.info("Stopping async node lifecycle...") - self.stop() - - def start(self): - """Starts a node. - - Starts the processor thread (if enabled). Generates network - graph from nodes and edges in cache. Processes any state changes - of node bundle. Initiates handshake with first contact if node - doesn't have any neighbors. Catches up with coordinator state. - """ - - # attempt to load config from yaml, and write back changes (if any) - # self.config_loader.load_from_yaml() - # self.config_loader.save_to_yaml() - - # self.secure_manager.load_priv_key() - - # self.kobj_worker.start() - # self.event_worker.start() - # self.graph.generate() - - # refresh to reflect changes (if any) in config.yaml node profile - self.kobj_queue.push(bundle=Bundle.generate( - rid=self.identity.rid, - contents=self.identity.profile.model_dump() - )) - - # why am I doing this? 
waiting for self bundle to process before continuing in the process - self.kobj_queue.q.join() - - node_providers = self.graph.get_neighbors( - direction="in", - allowed_type=[KoiNetNode] - ) - - if node_providers: - log.debug(f"Catching up with `orn:koi-net.node` providers: {node_providers}") - self.sync_manager.catch_up_with(node_providers, [KoiNetNode]) - - elif self.config.koi_net.first_contact.rid: - log.debug(f"No edges with `orn:koi-net.node` providers, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") - self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) - - def stop(self): - """Stops a node, send stop signals to workers.""" - - # self.kobj_worker.stop() - # self.event_worker.stop() \ No newline at end of file From ffba7f2d0b5951db49c6ab386184894cfedaacb7 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 2 Dec 2025 23:16:12 -0500 Subject: [PATCH 44/53] moved assembler, component, container, and consts to shared build module, cleaned up documentation --- src/koi_net/assembler_consts.py | 5 -- src/koi_net/{ => build}/artifact.py | 106 +++++++++++++++------------ src/koi_net/{ => build}/assembler.py | 4 +- src/koi_net/build/component.py | 9 +++ src/koi_net/build/consts.py | 13 ++++ src/koi_net/{ => build}/container.py | 2 +- src/koi_net/component.py | 6 -- src/koi_net/config/core.py | 6 +- src/koi_net/core.py | 2 +- src/koi_net/entrypoints/poller.py | 2 +- 10 files changed, 89 insertions(+), 66 deletions(-) delete mode 100644 src/koi_net/assembler_consts.py rename src/koi_net/{ => build}/artifact.py (61%) rename src/koi_net/{ => build}/assembler.py (95%) create mode 100644 src/koi_net/build/component.py create mode 100644 src/koi_net/build/consts.py rename src/koi_net/{ => build}/container.py (95%) delete mode 100644 src/koi_net/component.py diff --git a/src/koi_net/assembler_consts.py b/src/koi_net/assembler_consts.py deleted file mode 100644 index 86488f3..0000000 --- a/src/koi_net/assembler_consts.py +++ /dev/null @@ -1,5 +0,0 @@ -START_FUNC_NAME = "start" -STOP_FUNC_NAME = "stop" -START_ORDER_OVERRIDE = "_start_order" -STOP_ORDER_OVERRIDE = "_stop_order" -COMP_TYPE_OVERRIDE = "_comp_type" \ No newline at end of file diff --git a/src/koi_net/artifact.py b/src/koi_net/build/artifact.py similarity index 61% rename from src/koi_net/artifact.py rename to src/koi_net/build/artifact.py index 3117cab..5c0ef3f 100644 --- a/src/koi_net/artifact.py +++ b/src/koi_net/build/artifact.py @@ -1,30 +1,36 @@ import inspect from collections import deque -from enum import StrEnum -from typing import Any +from typing import TYPE_CHECKING, Any from pydantic import BaseModel -from koi_net.assembler_consts import COMP_TYPE_OVERRIDE, START_FUNC_NAME, START_ORDER_OVERRIDE, STOP_FUNC_NAME, STOP_ORDER_OVERRIDE +if TYPE_CHECKING: + from .assembler import NodeAssembler +from .consts import ( + COMP_TYPE_OVERRIDE, + START_FUNC_NAME, + START_ORDER_OVERRIDE, + STOP_FUNC_NAME, + STOP_ORDER_OVERRIDE, + CompType +) -class CompType(StrEnum): - FACTORY = "FACTORY" - OBJECT = "OBJECT" class AssemblyArtifact: - assembler: Any + assembler: "NodeAssembler" comp_dict: dict[str, Any] dep_graph: dict[str, list[str]] comp_types: dict[str, CompType] init_order: list[str] start_order: list[str] stop_order: list[str] + graphviz: str - def __init__(self, assembler): + def __init__(self, assembler: "NodeAssembler"): self.assembler = assembler def collect_comps(self): - """Collects components into `comp_dict` from class definition.""" + """Collects components from class definition.""" 
self.comp_dict = {} # adds components from class and all base classes. skips `type`, and runs in reverse so that sub classes override super class values @@ -39,53 +45,60 @@ def collect_comps(self): def build_dependencies(self): """Builds dependency graph and component type map. - Graph representation is an adjacency list: each key is a component - name, and the value is a tuple containing a list of dependency - component names. + Graph representation is an adjacency list: the key is a component + name, and the value is a tuple containing names of the depedencies. """ self.comp_types = {} self.dep_graph = {} for comp_name, comp in self.comp_dict.items(): - explicit_type = getattr(comp, COMP_TYPE_OVERRIDE, None) dep_names = [] + + explicit_type = getattr(comp, COMP_TYPE_OVERRIDE, None) if explicit_type: self.comp_types[comp_name] = explicit_type + # non callable components are objects treated "as is" elif not callable(comp): self.comp_types[comp_name] = CompType.OBJECT - elif isinstance(comp, type) and issubclass(comp, BaseModel): - self.comp_types[comp_name] = CompType.OBJECT - + # callable components default to singletons else: sig = inspect.signature(comp) - self.comp_types[comp_name] = CompType.FACTORY + self.comp_types[comp_name] = CompType.SINGLETON dep_names = list(sig.parameters) + invalid_deps = set(dep_names) - set(self.comp_dict) + if invalid_deps: + raise Exception(f"Dependencies {invalid_deps} of component '{comp_name}' are undefined") + self.dep_graph[comp_name] = dep_names [print(f"{i}: {comp_name} -> {deps}") for i, (comp_name, deps) in enumerate(self.dep_graph.items())] def build_init_order(self): - # adj list: n -> outgoing neighbors + """Builds component initialization order using Kahn's algorithm.""" + # adj list: n -> outgoing neighbors + adj = self.dep_graph # reverse adj list: n -> incoming neighbors r_adj: dict[str, list[str]] = {} # computes reverse adjacency list - for node in self.dep_graph: + for node in adj: r_adj.setdefault(node, []) - for n in self.dep_graph[node]: + for n in adj[node]: r_adj.setdefault(n, []) r_adj[n].append(node) - out_degree: dict[str, int] = { + # how many outgoing edges each node has + out_degree = { n: len(neighbors) - for n, neighbors in self.dep_graph.items() + for n, neighbors in adj.items() } + # initializing queue: nodes w/o dependencies queue = deque() for node in out_degree: if out_degree[node] == 0: @@ -93,56 +106,55 @@ def build_init_order(self): self.init_order = [] while queue: + # removes node from graph n = queue.popleft() self.init_order.append(n) - + + # updates out degree for nodes dependent on this node for next_n in r_adj[n]: out_degree[next_n] -= 1 + # adds nodes now without dependencies to queue if out_degree[next_n] == 0: queue.append(next_n) if len(self.init_order) != len(self.dep_graph): - cycle_nodes = set(self.dep_graph.keys()) - set(self.init_order) + cycle_nodes = set(self.dep_graph) - set(self.init_order) raise Exception(f"Found cycle in dependency graph, the following nodes could not be ordered: {cycle_nodes}") print("\ninit order") [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.init_order)] def build_start_order(self): - start_order_override = getattr( - self.assembler, START_ORDER_OVERRIDE, None) - - if start_order_override: - self.start_order = start_order_override - else: - self.start_order = [] - for comp_name in self.init_order: - comp = self.comp_dict[comp_name] - if getattr(comp, START_FUNC_NAME, None): - self.start_order.append(comp_name) + """Builds component start order. 
+ + Checks if components define a start function in init order. Can + be overridden by setting start order override in the `NodeAssembler`. + """ + self.start_order = getattr(self.assembler, START_ORDER_OVERRIDE, None) or [ + comp_name for comp_name in self.init_order + if getattr(self.comp_dict[comp_name], START_FUNC_NAME, None) + ] print("\nstart order") [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.start_order)] def build_stop_order(self): - stop_order_override = getattr( - self.assembler, STOP_ORDER_OVERRIDE, None) - - if stop_order_override: - self.stop_order = stop_order_override - else: - self.stop_order = [] - for comp_name in reversed(self.init_order): - comp = self.comp_dict[comp_name] - if getattr(comp, STOP_FUNC_NAME, None): - self.stop_order.append(comp_name) + """Builds component stop order. + + Checks if components define a stop function in init order. Can + be overridden by setting stop order override in the `NodeAssembler`. + """ + self.stop_order = getattr(self.assembler, STOP_ORDER_OVERRIDE, None) or [ + comp_name for comp_name in self.init_order + if getattr(self.comp_dict[comp_name], STOP_FUNC_NAME, None) + ] print("\nstop order") [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.stop_order)] def visualize(self) -> str: - """Returns representation of dependency graph in Graphviz DOT language.""" + """Creates representation of dependency graph in Graphviz DOT language.""" s = "digraph G {\n" for node, neighbors in self.dep_graph.items(): diff --git a/src/koi_net/assembler.py b/src/koi_net/build/assembler.py similarity index 95% rename from src/koi_net/assembler.py rename to src/koi_net/build/assembler.py index 00690b1..a0709d3 100644 --- a/src/koi_net/assembler.py +++ b/src/koi_net/build/assembler.py @@ -9,10 +9,10 @@ log = structlog.stdlib.get_logger() - class NodeAssembler: _artifact: AssemblyArtifact = None + # optional order overrides: _start_order: list[str] _stop_order: list[str] @@ -45,7 +45,7 @@ def _build_components(artifact: AssemblyArtifact): if comp_type == CompType.OBJECT: components[comp_name] = comp - elif comp_type == CompType.FACTORY: + elif comp_type == CompType.SINGLETON: # builds depedency dict for current component dependencies = {} for dep in artifact.dep_graph[comp_name]: diff --git a/src/koi_net/build/component.py b/src/koi_net/build/component.py new file mode 100644 index 0000000..f2f5de6 --- /dev/null +++ b/src/koi_net/build/component.py @@ -0,0 +1,9 @@ +from .consts import COMP_TYPE_OVERRIDE +from .artifact import CompType + +def object(cls): + setattr(cls, COMP_TYPE_OVERRIDE, CompType.OBJECT) + return cls + +def factory(cls): + setattr(cls, COMP_TYPE_OVERRIDE, CompType.FACTORY) \ No newline at end of file diff --git a/src/koi_net/build/consts.py b/src/koi_net/build/consts.py new file mode 100644 index 0000000..fafe50f --- /dev/null +++ b/src/koi_net/build/consts.py @@ -0,0 +1,13 @@ +from enum import StrEnum + + +START_FUNC_NAME = "start" +STOP_FUNC_NAME = "stop" +START_ORDER_OVERRIDE = "_start_order" +STOP_ORDER_OVERRIDE = "_stop_order" +COMP_TYPE_OVERRIDE = "_comp_type" + +class CompType(StrEnum): + SINGLETON = "SINGLETON" + FACTORY = "FACTORY" + OBJECT = "OBJECT" diff --git a/src/koi_net/container.py b/src/koi_net/build/container.py similarity index 95% rename from src/koi_net/container.py rename to src/koi_net/build/container.py index 9693d1b..7b75d46 100644 --- a/src/koi_net/container.py +++ b/src/koi_net/build/container.py @@ -1,6 +1,6 @@ import structlog -from .entrypoints.base import EntryPoint +from 
..entrypoints.base import EntryPoint from .artifact import AssemblyArtifact log = structlog.stdlib.get_logger() diff --git a/src/koi_net/component.py b/src/koi_net/component.py deleted file mode 100644 index ab29877..0000000 --- a/src/koi_net/component.py +++ /dev/null @@ -1,6 +0,0 @@ -from .assembler_consts import COMP_TYPE_OVERRIDE -from .artifact import CompType - -def static(cls): - setattr(cls, COMP_TYPE_OVERRIDE, CompType.OBJECT) - return cls \ No newline at end of file diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index c3ed5e6..33edf74 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -5,8 +5,8 @@ from rid_lib.types import KoiNetNode import structlog -from koi_net import component -from koi_net.protocol.secure import PrivateKey +from ..build import component +from ..protocol.secure import PrivateKey from ..protocol.node import NodeProfile log = structlog.stdlib.get_logger() @@ -68,7 +68,7 @@ def __getattribute__(self, name): # marking this component as static, classes are implicitly treated as # factories, but this needs to be passed as is -@component.static +@component.object class NodeConfig(BaseModel): """Base node config class, intended to be extended.""" diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 1f12c66..3033a61 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -1,6 +1,6 @@ from .cache import Cache from .log_system import LogSystem -from .assembler import NodeAssembler +from .build.assembler import NodeAssembler from .config.core import NodeConfig from .config.proxy import ConfigProxy from .config.loader import ConfigLoader diff --git a/src/koi_net/entrypoints/poller.py b/src/koi_net/entrypoints/poller.py index dd86c03..52de323 100644 --- a/src/koi_net/entrypoints/poller.py +++ b/src/koi_net/entrypoints/poller.py @@ -20,7 +20,7 @@ def __init__( self, config: PartialNodeConfig, kobj_queue: KobjQueue, - resolver: NetworkResolver, + resolver: NetworkResolver ): self.kobj_queue = kobj_queue self.resolver = resolver From 1e0e1169048b24959b5a5f2ab9448c3549f29211 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 2 Dec 2025 23:38:40 -0500 Subject: [PATCH 45/53] moved handshaker, profile monitor (renamed from self start), and sync manager to behaviors module. removed catcher upper component and split its start up functions between the handshaker and sync manager. 
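For context on how these behavior components are wired in: any public, non-None class attribute on a NodeAssembler subclass is collected as a component, its constructor parameter names declare its dependencies, and a start()/stop() method, if present, is called by the node container in dependency order. Below is a minimal sketch of a custom behavior, assuming the base node exposes an `identity` component the way the built-in behaviors do; GreetingBehavior and MyNode are illustrative names, not part of the library.

    from koi_net.core import FullNode
    from koi_net.identity import NodeIdentity

    class GreetingBehavior:
        # dependency injected by constructor parameter name ("identity")
        def __init__(self, identity: NodeIdentity):
            self.identity = identity

        # discovered by the build artifact and called on node startup
        def start(self):
            print(f"running as {self.identity.rid}")

    class MyNode(FullNode):
        # public class attribute -> collected and built as a singleton component
        greeter: GreetingBehavior = GreetingBehavior

    if __name__ == "__main__":
        MyNode().run()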
--- src/koi_net/{ => behaviors}/handshaker.py | 34 +++++++++++++++--- .../profile_monitor.py} | 16 ++++----- src/koi_net/{ => behaviors}/sync_manager.py | 25 ++++++++++--- src/koi_net/catcher_upper.py | 36 ------------------- src/koi_net/core.py | 10 +++--- src/koi_net/network/error_handler.py | 6 ++-- src/koi_net/utils.py | 18 ---------- 7 files changed, 63 insertions(+), 82 deletions(-) rename src/koi_net/{ => behaviors}/handshaker.py (51%) rename src/koi_net/{self_starter.py => behaviors/profile_monitor.py} (52%) rename src/koi_net/{ => behaviors}/sync_manager.py (65%) delete mode 100644 src/koi_net/catcher_upper.py delete mode 100644 src/koi_net/utils.py diff --git a/src/koi_net/handshaker.py b/src/koi_net/behaviors/handshaker.py similarity index 51% rename from src/koi_net/handshaker.py rename to src/koi_net/behaviors/handshaker.py index bea8511..58fb278 100644 --- a/src/koi_net/handshaker.py +++ b/src/koi_net/behaviors/handshaker.py @@ -1,9 +1,12 @@ import structlog from rid_lib.ext import Cache from rid_lib.types import KoiNetNode -from koi_net.identity import NodeIdentity -from koi_net.network.event_queue import EventQueue -from .protocol.event import Event, EventType + +from ..network.graph import NetworkGraph +from ..config.core import NodeConfig +from ..identity import NodeIdentity +from ..network.event_queue import EventQueue +from ..protocol.event import Event, EventType log = structlog.stdlib.get_logger() @@ -14,11 +17,34 @@ def __init__( self, cache: Cache, identity: NodeIdentity, - event_queue: EventQueue + event_queue: EventQueue, + config: NodeConfig, + graph: NetworkGraph ): + self.config = config self.cache = cache self.identity = identity self.event_queue = event_queue + self.graph = graph + + def start(self): + """Attempts handshake with first contact on startup. + + Handshake occurs if first contact is set in the config, the first + contact is not already known to this node, and this node does not + already have incoming edges with node providers. + """ + if not self.config.koi_net.first_contact.rid: + return + + if self.cache.read(self.config.koi_net.first_contact.rid): + return + + if not self.graph.get_neighbors( + direction="in", allowed_type=KoiNetNode): + return + + self.handshake_with(self.config.koi_net.first_contact.rid) def handshake_with(self, target: KoiNetNode): """Initiates a handshake with target node. 
diff --git a/src/koi_net/self_starter.py b/src/koi_net/behaviors/profile_monitor.py similarity index 52% rename from src/koi_net/self_starter.py rename to src/koi_net/behaviors/profile_monitor.py index 43c4485..cd84b1d 100644 --- a/src/koi_net/self_starter.py +++ b/src/koi_net/behaviors/profile_monitor.py @@ -1,9 +1,10 @@ from rid_lib.ext import Bundle -from koi_net.identity import NodeIdentity -from koi_net.processor.kobj_queue import KobjQueue +from ..identity import NodeIdentity +from ..processor.kobj_queue import KobjQueue -class SelfStart: +class ProfileMonitor: + """Processes changes to node profile in the config.""" def __init__( self, kobj_queue: KobjQueue, @@ -13,15 +14,10 @@ def __init__( self.identity = identity def start(self): - print(self.identity.rid) - print(self.identity.profile) - + """Processes identity bundle generated from config.""" self_bundle = Bundle.generate( rid=self.identity.rid, contents=self.identity.profile.model_dump() ) - self.kobj_queue.push(bundle=self_bundle) - - # will freeze if called before kobj worker is started: - # self.kobj_queue.q.join() \ No newline at end of file + self.kobj_queue.push(bundle=self_bundle) \ No newline at end of file diff --git a/src/koi_net/sync_manager.py b/src/koi_net/behaviors/sync_manager.py similarity index 65% rename from src/koi_net/sync_manager.py rename to src/koi_net/behaviors/sync_manager.py index c53c398..94ae6a6 100644 --- a/src/koi_net/sync_manager.py +++ b/src/koi_net/behaviors/sync_manager.py @@ -1,10 +1,11 @@ from rid_lib.ext import Cache +from rid_lib.types import KoiNetNode -from .network.graph import NetworkGraph -from .network.request_handler import RequestHandler -from .processor.kobj_queue import KobjQueue -from .protocol.api_models import ErrorResponse -from .protocol.node import NodeProfile, NodeType +from ..network.graph import NetworkGraph +from ..network.request_handler import RequestHandler +from ..processor.kobj_queue import KobjQueue +from ..protocol.api_models import ErrorResponse +from ..protocol.node import NodeProfile, NodeType class SyncManager: @@ -25,6 +26,20 @@ def __init__( self.cache = cache self.request_handler = request_handler self.kobj_queue = kobj_queue + + def start(self): + """Catches up with node providers on startup.""" + + node_providers = self.graph.get_neighbors( + direction="in", + allowed_type=KoiNetNode + ) + + if not node_providers: + return + + # log.debug(f"Catching up with `orn:koi-net.node` providers: {node_providers}") + self.catch_up_with(node_providers, [KoiNetNode]) def catch_up_with(self, nodes, rid_types): """Catches up with the state of RID types within other nodes.""" diff --git a/src/koi_net/catcher_upper.py b/src/koi_net/catcher_upper.py deleted file mode 100644 index 91d8b35..0000000 --- a/src/koi_net/catcher_upper.py +++ /dev/null @@ -1,36 +0,0 @@ -import structlog -from rid_lib.types import KoiNetNode -from koi_net.config.core import NodeConfig -from koi_net.handshaker import Handshaker -from koi_net.network.graph import NetworkGraph -from koi_net.sync_manager import SyncManager - -log = structlog.stdlib.get_logger() - - -class CatcherUpper: - def __init__( - self, - graph: NetworkGraph, - sync_manager: SyncManager, - handshaker: Handshaker, - config: NodeConfig - ): - self.graph = graph - self.sync_manager = sync_manager - self.handshaker = handshaker - self.config = config - - def start(self): - node_providers = self.graph.get_neighbors( - direction="in", - allowed_type=KoiNetNode - ) - - if node_providers: - log.debug(f"Catching up with 
`orn:koi-net.node` providers: {node_providers}") - self.sync_manager.catch_up_with(node_providers, [KoiNetNode]) - - elif self.config.koi_net.first_contact.rid: - log.debug(f"No edges with `orn:koi-net.node` providers, reaching out to first contact {self.config.koi_net.first_contact.rid!r}") - self.handshaker.handshake_with(self.config.koi_net.first_contact.rid) \ No newline at end of file diff --git a/src/koi_net/core.py b/src/koi_net/core.py index 3033a61..b4f8f90 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -6,8 +6,8 @@ from .config.loader import ConfigLoader from .processor.context import HandlerContext from .effector import DerefHandler, Effector -from .handshaker import Handshaker -from .sync_manager import SyncManager +from .behaviors.handshaker import Handshaker +from .behaviors.sync_manager import SyncManager from .identity import NodeIdentity from .workers import KnowledgeProcessingWorker, EventProcessingWorker from .network.error_handler import ErrorHandler @@ -21,8 +21,7 @@ from .processor.kobj_queue import KobjQueue from .processor.handler import KnowledgeHandler from .secure_manager import SecureManager -from .catcher_upper import CatcherUpper -from .self_starter import SelfStart +from .behaviors.profile_monitor import ProfileMonitor from .entrypoints import NodeServer, NodePoller from .processor.knowledge_handlers import ( basic_manifest_handler, @@ -68,8 +67,7 @@ class BaseNode(NodeAssembler): pipeline: KnowledgePipeline = KnowledgePipeline kobj_worker: KnowledgeProcessingWorker = KnowledgeProcessingWorker event_worker: EventProcessingWorker = EventProcessingWorker - catcher_upper: CatcherUpper = CatcherUpper - self_start: SelfStart = SelfStart + profile_monitor: ProfileMonitor = ProfileMonitor class FullNode(BaseNode): entrypoint: NodeServer = NodeServer diff --git a/src/koi_net/network/error_handler.py b/src/koi_net/network/error_handler.py index bdc2c33..f6dd775 100644 --- a/src/koi_net/network/error_handler.py +++ b/src/koi_net/network/error_handler.py @@ -1,8 +1,8 @@ import structlog -from koi_net.handshaker import Handshaker -from koi_net.protocol.errors import ErrorType -from koi_net.protocol.event import EventType from rid_lib.types import KoiNetNode +from ..behaviors.handshaker import Handshaker +from ..protocol.errors import ErrorType +from ..protocol.event import EventType from ..processor.kobj_queue import KobjQueue log = structlog.stdlib.get_logger() diff --git a/src/koi_net/utils.py b/src/koi_net/utils.py deleted file mode 100644 index 43f0db8..0000000 --- a/src/koi_net/utils.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Callable - -from rid_lib import RID -from rid_lib.ext import Bundle, Cache - -cache = Cache() - -def build_dereferencer( - *funcs: list[Callable[[RID], Bundle | None]] -) -> Callable[[RID], Bundle | None]: - def any_of(rid: RID): - return any( - f(rid) for f in funcs - ) - return any_of - -deref = build_dereferencer(cache.read) -deref(RID.from_string("string:hello_world")) \ No newline at end of file From dc795475507d7328dd753525bfad5f79a6f181e5 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 2 Dec 2025 23:47:17 -0500 Subject: [PATCH 46/53] cleaning up, added get_logger method in log system, may have components import log as a dependency --- src/koi_net/behaviors/sync_manager.py | 5 ++++- src/koi_net/build/container.py | 9 +++++++-- src/koi_net/identity.py | 5 +---- src/koi_net/log_system.py | 7 +++++-- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/koi_net/behaviors/sync_manager.py 
b/src/koi_net/behaviors/sync_manager.py index 94ae6a6..0ef37ef 100644 --- a/src/koi_net/behaviors/sync_manager.py +++ b/src/koi_net/behaviors/sync_manager.py @@ -1,3 +1,4 @@ +import structlog from rid_lib.ext import Cache from rid_lib.types import KoiNetNode @@ -7,6 +8,8 @@ from ..protocol.api_models import ErrorResponse from ..protocol.node import NodeProfile, NodeType +log = structlog.stdlib.get_logger() + class SyncManager: """Handles state synchronization actions with other nodes.""" @@ -38,7 +41,7 @@ def start(self): if not node_providers: return - # log.debug(f"Catching up with `orn:koi-net.node` providers: {node_providers}") + log.debug(f"Catching up with `orn:koi-net.node` providers: {node_providers}") self.catch_up_with(node_providers, [KoiNetNode]) def catch_up_with(self, nodes, rid_types): diff --git a/src/koi_net/build/container.py b/src/koi_net/build/container.py index 7b75d46..3fe0b15 100644 --- a/src/koi_net/build/container.py +++ b/src/koi_net/build/container.py @@ -2,6 +2,7 @@ from ..entrypoints.base import EntryPoint from .artifact import AssemblyArtifact +from .consts import START_FUNC_NAME, STOP_FUNC_NAME log = structlog.stdlib.get_logger() @@ -9,11 +10,13 @@ class NodeContainer: """Dummy 'shape' for node containers built by assembler.""" _artifact: AssemblyArtifact + entrypoint: EntryPoint def __init__(self, artifact, **kwargs): self._artifact = artifact + # adds all components as attributes of this instance for name, comp in kwargs.items(): setattr(self, name, comp) @@ -30,10 +33,12 @@ def start(self): log.info("Starting node...") for comp_name in self._artifact.start_order: comp = getattr(self, comp_name) - comp.start() + start_func = getattr(comp, START_FUNC_NAME) + start_func() def stop(self): log.info("Stopping node...") for comp_name in self._artifact.stop_order: comp = getattr(self, comp_name) - comp.stop() \ No newline at end of file + stop_func = getattr(comp, STOP_FUNC_NAME) + stop_func() \ No newline at end of file diff --git a/src/koi_net/identity.py b/src/koi_net/identity.py index e4ca08b..4723d40 100644 --- a/src/koi_net/identity.py +++ b/src/koi_net/identity.py @@ -1,11 +1,8 @@ -import structlog -from rid_lib.types.koi_net_node import KoiNetNode +from rid_lib.types import KoiNetNode from .config.core import NodeConfig from .protocol.node import NodeProfile -log = structlog.stdlib.get_logger() - class NodeIdentity: """Represents a node's identity (RID, profile).""" diff --git a/src/koi_net/log_system.py b/src/koi_net/log_system.py index 3c4cad2..95df0a6 100644 --- a/src/koi_net/log_system.py +++ b/src/koi_net/log_system.py @@ -89,7 +89,7 @@ class LogSystem: """Handles initializing the logging system.""" - def __init__(self): + def __init__(self): file_handler = RotatingFileHandler( filename="log.ndjson", maxBytes=10 * 1024 * 1024, @@ -138,4 +138,7 @@ def __init__(self): wrapper_class=structlog.stdlib.BoundLogger, logger_factory=structlog.stdlib.LoggerFactory(), cache_logger_on_first_use=True, - ) \ No newline at end of file + ) + + def get_logger(self): + return structlog.stdlib.get_logger() \ No newline at end of file From 366b69251fc766b1977d84d8efae4f4c1a05df14 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Tue, 2 Dec 2025 23:47:41 -0500 Subject: [PATCH 47/53] version bump -> 1.2.0-beta.7 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index bb6d799..19d74f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" 
-version = "1.2.0b6" +version = "1.2.0b7" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} From 3ccd599be9deb695e95bf393e43b6cde1c1af46a Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 3 Dec 2025 14:31:05 -0500 Subject: [PATCH 48/53] renamed assembly artifact -> build artifact, added a new comp_order decorator to give start and stop orderer more information, comp_order.worker starts worker components before and stops them after all other components, renamed component -> comp_type, removed factory (for now), default is singleton, moved build_app() call to init of server, not run method, added comp_order.worker to base worker class, version bump -> 1.2.0-beta.8 --- pyproject.toml | 2 +- src/koi_net/build/artifact.py | 58 +++++++++++++++++++++++-------- src/koi_net/build/assembler.py | 8 ++--- src/koi_net/build/comp_order.py | 6 ++++ src/koi_net/build/comp_type.py | 7 ++++ src/koi_net/build/component.py | 9 ----- src/koi_net/build/consts.py | 7 +++- src/koi_net/build/container.py | 4 +-- src/koi_net/config/core.py | 4 +-- src/koi_net/entrypoints/server.py | 4 +-- src/koi_net/workers/base.py | 3 ++ 11 files changed, 77 insertions(+), 35 deletions(-) create mode 100644 src/koi_net/build/comp_order.py create mode 100644 src/koi_net/build/comp_type.py delete mode 100644 src/koi_net/build/component.py diff --git a/pyproject.toml b/pyproject.toml index 19d74f2..39a83c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "koi-net" -version = "1.2.0b7" +version = "1.2.0b8" description = "Implementation of KOI-net protocol in Python" authors = [ {name = "Luke Miller", email = "luke@block.science"} diff --git a/src/koi_net/build/artifact.py b/src/koi_net/build/artifact.py index 5c0ef3f..a18830e 100644 --- a/src/koi_net/build/artifact.py +++ b/src/koi_net/build/artifact.py @@ -1,22 +1,23 @@ import inspect from collections import deque from typing import TYPE_CHECKING, Any -from pydantic import BaseModel - -if TYPE_CHECKING: - from .assembler import NodeAssembler from .consts import ( + COMP_ORDER_OVERRIDE, COMP_TYPE_OVERRIDE, START_FUNC_NAME, START_ORDER_OVERRIDE, STOP_FUNC_NAME, STOP_ORDER_OVERRIDE, + CompOrder, CompType ) +if TYPE_CHECKING: + from .assembler import NodeAssembler + -class AssemblyArtifact: +class BuildArtifact: assembler: "NodeAssembler" comp_dict: dict[str, Any] dep_graph: dict[str, list[str]] @@ -130,10 +131,24 @@ def build_start_order(self): Checks if components define a start function in init order. Can be overridden by setting start order override in the `NodeAssembler`. """ - self.start_order = getattr(self.assembler, START_ORDER_OVERRIDE, None) or [ - comp_name for comp_name in self.init_order - if getattr(self.comp_dict[comp_name], START_FUNC_NAME, None) - ] + + self.start_order = getattr(self.assembler, START_ORDER_OVERRIDE, None) + + if self.start_order: + return + + workers = [] + start_order = [] + for comp_name in self.init_order: + comp = self.comp_dict[comp_name] + if getattr(comp, START_FUNC_NAME, None): + if getattr(comp, COMP_ORDER_OVERRIDE, None) == CompOrder.WORKER: + workers.append(comp_name) + else: + start_order.append(comp_name) + + # order workers first + self.start_order = workers + start_order print("\nstart order") [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.start_order)] @@ -144,11 +159,26 @@ def build_stop_order(self): Checks if components define a stop function in init order. 
Can be overridden by setting stop order override in the `NodeAssembler`. """ - self.stop_order = getattr(self.assembler, STOP_ORDER_OVERRIDE, None) or [ - comp_name for comp_name in self.init_order - if getattr(self.comp_dict[comp_name], STOP_FUNC_NAME, None) - ] - + self.stop_order = getattr(self.assembler, STOP_ORDER_OVERRIDE, None) + + if self.stop_order: + return + + workers = [] + stop_order = [] + for comp_name in self.init_order: + comp = self.comp_dict[comp_name] + if getattr(comp, STOP_FUNC_NAME, None): + if getattr(comp, COMP_ORDER_OVERRIDE, None) == CompOrder.WORKER: + workers.append(comp_name) + else: + stop_order.append(comp_name) + + # order workers first (last) + self.stop_order = workers + stop_order + # reverse order from start order + self.stop_order.reverse() + print("\nstop order") [print(f"{i}: {comp_name}") for i, comp_name in enumerate(self.stop_order)] diff --git a/src/koi_net/build/assembler.py b/src/koi_net/build/assembler.py index a0709d3..9c60927 100644 --- a/src/koi_net/build/assembler.py +++ b/src/koi_net/build/assembler.py @@ -3,14 +3,14 @@ import structlog -from .artifact import AssemblyArtifact, CompType +from .artifact import BuildArtifact, CompType from .container import NodeContainer log = structlog.stdlib.get_logger() class NodeAssembler: - _artifact: AssemblyArtifact = None + _artifact: BuildArtifact = None # optional order overrides: _start_order: list[str] @@ -22,7 +22,7 @@ def __new__(cls) -> Self | NodeContainer: # builds assembly artifact if it doesn't exist if not cls._artifact: - cls._artifact = AssemblyArtifact(cls) + cls._artifact = BuildArtifact(cls) cls._artifact.build() components = cls._build_components(cls._artifact) @@ -30,7 +30,7 @@ def __new__(cls) -> Self | NodeContainer: return NodeContainer(cls._artifact, **components) @staticmethod - def _build_components(artifact: AssemblyArtifact): + def _build_components(artifact: BuildArtifact): """Returns assembled components as a dict.""" print("\nbuilding components") diff --git a/src/koi_net/build/comp_order.py b/src/koi_net/build/comp_order.py new file mode 100644 index 0000000..e042f6b --- /dev/null +++ b/src/koi_net/build/comp_order.py @@ -0,0 +1,6 @@ +from koi_net.build.consts import COMP_ORDER_OVERRIDE, CompOrder + + +def worker(cls): + setattr(cls, COMP_ORDER_OVERRIDE, CompOrder.WORKER) + return cls \ No newline at end of file diff --git a/src/koi_net/build/comp_type.py b/src/koi_net/build/comp_type.py new file mode 100644 index 0000000..e34fb77 --- /dev/null +++ b/src/koi_net/build/comp_type.py @@ -0,0 +1,7 @@ +from .consts import COMP_TYPE_OVERRIDE, CompType + + +def object(cls): + """Sets a component's type to `CompType.OBJECT`.""" + setattr(cls, COMP_TYPE_OVERRIDE, CompType.OBJECT) + return cls \ No newline at end of file diff --git a/src/koi_net/build/component.py b/src/koi_net/build/component.py deleted file mode 100644 index f2f5de6..0000000 --- a/src/koi_net/build/component.py +++ /dev/null @@ -1,9 +0,0 @@ -from .consts import COMP_TYPE_OVERRIDE -from .artifact import CompType - -def object(cls): - setattr(cls, COMP_TYPE_OVERRIDE, CompType.OBJECT) - return cls - -def factory(cls): - setattr(cls, COMP_TYPE_OVERRIDE, CompType.FACTORY) \ No newline at end of file diff --git a/src/koi_net/build/consts.py b/src/koi_net/build/consts.py index fafe50f..9101c92 100644 --- a/src/koi_net/build/consts.py +++ b/src/koi_net/build/consts.py @@ -3,11 +3,16 @@ START_FUNC_NAME = "start" STOP_FUNC_NAME = "stop" + START_ORDER_OVERRIDE = "_start_order" STOP_ORDER_OVERRIDE = "_stop_order" + 
COMP_TYPE_OVERRIDE = "_comp_type" +COMP_ORDER_OVERRIDE = "_comp_order" class CompType(StrEnum): SINGLETON = "SINGLETON" - FACTORY = "FACTORY" OBJECT = "OBJECT" + +class CompOrder(StrEnum): + WORKER = "WORKER" \ No newline at end of file diff --git a/src/koi_net/build/container.py b/src/koi_net/build/container.py index 3fe0b15..56ace2f 100644 --- a/src/koi_net/build/container.py +++ b/src/koi_net/build/container.py @@ -1,7 +1,7 @@ import structlog from ..entrypoints.base import EntryPoint -from .artifact import AssemblyArtifact +from .artifact import BuildArtifact from .consts import START_FUNC_NAME, STOP_FUNC_NAME log = structlog.stdlib.get_logger() @@ -9,7 +9,7 @@ class NodeContainer: """Dummy 'shape' for node containers built by assembler.""" - _artifact: AssemblyArtifact + _artifact: BuildArtifact entrypoint: EntryPoint diff --git a/src/koi_net/config/core.py b/src/koi_net/config/core.py index 33edf74..b64d763 100644 --- a/src/koi_net/config/core.py +++ b/src/koi_net/config/core.py @@ -5,7 +5,7 @@ from rid_lib.types import KoiNetNode import structlog -from ..build import component +from ..build import comp_type from ..protocol.secure import PrivateKey from ..protocol.node import NodeProfile @@ -68,7 +68,7 @@ def __getattribute__(self, name): # marking this component as static, classes are implicitly treated as # factories, but this needs to be passed as is -@component.object +@comp_type.object class NodeConfig(BaseModel): """Base node config class, intended to be extended.""" diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index 78fbacb..72266d3 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -28,6 +28,8 @@ def __init__( self.config = config self.response_handler = response_handler + self.build_app() + def build_endpoints(self, router: APIRouter): """Builds endpoints for API router.""" for path, models in API_MODEL_MAP.items(): @@ -75,8 +77,6 @@ def protocol_error_handler(self, request, exc: ProtocolError): def run(self): """Starts FastAPI server and event handler.""" - self.build_app() - uvicorn.run( app=self.app, host=self.config.server.host, diff --git a/src/koi_net/workers/base.py b/src/koi_net/workers/base.py index c42f92b..d67ceb1 100644 --- a/src/koi_net/workers/base.py +++ b/src/koi_net/workers/base.py @@ -1,5 +1,7 @@ import threading +from koi_net.build import comp_order + class End: """Class for STOP_WORKER sentinel pushed to worker queues.""" @@ -7,6 +9,7 @@ class End: STOP_WORKER = End() +@comp_order.worker class ThreadWorker: """Base class for thread workers.""" From 425b18b2e0f689063651762d99eb3c8e06eabd1d Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 3 Dec 2025 16:59:45 -0500 Subject: [PATCH 49/53] readded access log to uvicorn, added more specific typehints for full node and partial node config --- src/koi_net/core.py | 6 +++++- src/koi_net/entrypoints/server.py | 3 +-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/koi_net/core.py b/src/koi_net/core.py index b4f8f90..fbf4596 100644 --- a/src/koi_net/core.py +++ b/src/koi_net/core.py @@ -4,6 +4,8 @@ from .config.core import NodeConfig from .config.proxy import ConfigProxy from .config.loader import ConfigLoader +from .config.full_node import FullNodeConfig +from .config.partial_node import PartialNodeConfig from .processor.context import HandlerContext from .effector import DerefHandler, Effector from .behaviors.handshaker import Handshaker @@ -71,6 +73,8 @@ class BaseNode(NodeAssembler): class FullNode(BaseNode): 
entrypoint: NodeServer = NodeServer + config: FullNodeConfig class PartialNode(BaseNode): - entrypoint: NodePoller = NodePoller \ No newline at end of file + entrypoint: NodePoller = NodePoller + config: PartialNodeConfig \ No newline at end of file diff --git a/src/koi_net/entrypoints/server.py b/src/koi_net/entrypoints/server.py index 72266d3..10920dc 100644 --- a/src/koi_net/entrypoints/server.py +++ b/src/koi_net/entrypoints/server.py @@ -81,6 +81,5 @@ def run(self): app=self.app, host=self.config.server.host, port=self.config.server.port, - log_config=None, - access_log=False + log_config=None ) \ No newline at end of file From a71b531a8bfa091bb32b2cf61e9503823d4fe5b6 Mon Sep 17 00:00:00 2001 From: lukvmil Date: Wed, 3 Dec 2025 17:38:07 -0500 Subject: [PATCH 50/53] updated log system, achieved compatibility with uvicorn logs including errors and access! --- examples/coordinator.py | 3 +- examples/partial.py | 3 +- src/koi_net/log_system.py | 239 ++++++++++++++++++++------------------ 3 files changed, 126 insertions(+), 119 deletions(-) diff --git a/examples/coordinator.py b/examples/coordinator.py index a1cd33a..040491f 100644 --- a/examples/coordinator.py +++ b/examples/coordinator.py @@ -65,5 +65,4 @@ class CoordinatorNode(FullNode): knowledge_handlers = FullNode.knowledge_handlers + [handshake_handler] if __name__ == "__main__": - node = CoordinatorNode() - # node.entrypoint.run() \ No newline at end of file + CoordinatorNode().run() \ No newline at end of file diff --git a/examples/partial.py b/examples/partial.py index 4f0436b..a52e28b 100644 --- a/examples/partial.py +++ b/examples/partial.py @@ -12,5 +12,4 @@ class MyPartialNode(PartialNode): config_schema = MyPartialNodeConfig if __name__ == "__main__": - node = MyPartialNode() - node.run() \ No newline at end of file + MyPartialNode().run() \ No newline at end of file diff --git a/src/koi_net/log_system.py b/src/koi_net/log_system.py index 95df0a6..9f6b657 100644 --- a/src/koi_net/log_system.py +++ b/src/koi_net/log_system.py @@ -2,143 +2,152 @@ import logging from logging.handlers import RotatingFileHandler from datetime import datetime + import structlog import colorama -console_renderer = structlog.dev.ConsoleRenderer( - columns=[ - # Render the timestamp without the key name in yellow. - structlog.dev.Column( - "timestamp", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Style.DIM, - reset_style=colorama.Style.RESET_ALL, - value_repr=lambda t: datetime.fromisoformat(t).strftime("%Y-%m-%d %H:%M:%S"), - ), - ), - structlog.dev.Column( - "level", - structlog.dev.LogLevelColumnFormatter( - level_styles={ - level: colorama.Style.BRIGHT + color - for level, color in { - "critical": colorama.Fore.RED, - "exception": colorama.Fore.RED, - "error": colorama.Fore.RED, - "warn": colorama.Fore.YELLOW, - "warning": colorama.Fore.YELLOW, - "info": colorama.Fore.GREEN, - "debug": colorama.Fore.GREEN, - "notset": colorama.Back.RED, - }.items() - }, - reset_style=colorama.Style.RESET_ALL, - width=9 - ) - ), - # Render the event without the key name in bright magenta. - - # Default formatter for all keys not explicitly mentioned. The key is - # cyan, the value is green. 
- structlog.dev.Column( - "path", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Fore.MAGENTA, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - width=30 - ), - ), - # structlog.dev.Column( - # "func_name", - # structlog.dev.KeyValueColumnFormatter( - # key_style=None, - # value_style=colorama.Fore.MAGENTA, - # reset_style=colorama.Style.RESET_ALL, - # value_repr=str, - # prefix="(", - # postfix=")", - # width=15 - # ), - # ), - structlog.dev.Column( - "event", - structlog.dev.KeyValueColumnFormatter( - key_style=None, - value_style=colorama.Fore.WHITE, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - width=30 - ), - ), - structlog.dev.Column( - "", - structlog.dev.KeyValueColumnFormatter( - key_style=colorama.Fore.BLUE, - value_style=colorama.Fore.GREEN, - reset_style=colorama.Style.RESET_ALL, - value_repr=str, - ), - ) - ] -) - class LogSystem: """Handles initializing the logging system.""" - def __init__(self): + COMMON_PROCESSORS = [ + structlog.stdlib.add_logger_name, + structlog.stdlib.add_log_level, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.TimeStamper(fmt="iso"), + structlog.processors.UnicodeDecoder(), + structlog.processors.CallsiteParameterAdder({ + structlog.processors.CallsiteParameter.MODULE, + structlog.processors.CallsiteParameter.FUNC_NAME + }), + ] + + def __init__(self): + self.setup_logging() + + def console_handler(self): + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter( + structlog.stdlib.ProcessorFormatter( + processor=structlog.dev.ConsoleRenderer( + columns=[ + # Render the timestamp without the key name in yellow. + structlog.dev.Column( + "timestamp", + structlog.dev.KeyValueColumnFormatter( + key_style=None, + value_style=colorama.Style.DIM, + reset_style=colorama.Style.RESET_ALL, + value_repr=lambda t: datetime.fromisoformat(t).strftime("%Y-%m-%d %H:%M:%S"), + ), + ), + structlog.dev.Column( + "level", + structlog.dev.LogLevelColumnFormatter( + level_styles={ + level: colorama.Style.BRIGHT + color + for level, color in { + "critical": colorama.Fore.RED, + "exception": colorama.Fore.RED, + "error": colorama.Fore.RED, + "warn": colorama.Fore.YELLOW, + "warning": colorama.Fore.YELLOW, + "info": colorama.Fore.GREEN, + "debug": colorama.Fore.GREEN, + "notset": colorama.Back.RED, + }.items() + }, + reset_style=colorama.Style.RESET_ALL, + width=9 + ) + ), + # Render the event without the key name in bright magenta. + + # Default formatter for all keys not explicitly mentioned. The key is + # cyan, the value is green. 
+                        structlog.dev.Column(
+                            "path",
+                            structlog.dev.KeyValueColumnFormatter(
+                                key_style=None,
+                                value_style=colorama.Fore.MAGENTA,
+                                reset_style=colorama.Style.RESET_ALL,
+                                value_repr=str,
+                                width=30
+                            ),
+                        ),
+                        # structlog.dev.Column(
+                        #     "func_name",
+                        #     structlog.dev.KeyValueColumnFormatter(
+                        #         key_style=None,
+                        #         value_style=colorama.Fore.MAGENTA,
+                        #         reset_style=colorama.Style.RESET_ALL,
+                        #         value_repr=str,
+                        #         prefix="(",
+                        #         postfix=")",
+                        #         width=15
+                        #     ),
+                        # ),
+                        structlog.dev.Column(
+                            "event",
+                            structlog.dev.KeyValueColumnFormatter(
+                                key_style=None,
+                                value_style=colorama.Fore.WHITE,
+                                reset_style=colorama.Style.RESET_ALL,
+                                value_repr=str,
+                                width=30
+                            ),
+                        ),
+                        structlog.dev.Column(
+                            "",
+                            structlog.dev.KeyValueColumnFormatter(
+                                key_style=colorama.Fore.BLUE,
+                                value_style=colorama.Fore.GREEN,
+                                reset_style=colorama.Style.RESET_ALL,
+                                value_repr=str,
+                            ),
+                        )
+                    ]
+                ),
+                foreign_pre_chain=self.COMMON_PROCESSORS
+            )
+        )
+
+        return console_handler
+
+
+
+    def file_handler(self):
         file_handler = RotatingFileHandler(
             filename="log.ndjson",
-            maxBytes=10 * 1024 * 1024,
-            backupCount=5,
+            maxBytes=10 * 1024 ** 2,
+            backupCount=50,
             encoding="utf-8"
         )
+
         file_handler.setFormatter(
             structlog.stdlib.ProcessorFormatter(
-                processor=structlog.processors.JSONRenderer()
+                processor=structlog.processors.JSONRenderer(),
+                foreign_pre_chain=self.COMMON_PROCESSORS
             )
         )
 
-        console_handler = logging.StreamHandler(sys.stdout)
-        console_handler.setFormatter(
-            structlog.stdlib.ProcessorFormatter(
-                processor=console_renderer
-            )
-        )
+        return file_handler
 
-        logging.basicConfig(
-            level=logging.DEBUG,
-            handlers=[file_handler, console_handler]
-        )
+    def setup_logging(self):
+        handlers = [
+            self.file_handler(),
+            self.console_handler()
+        ]
+
+        logging.basicConfig(level=logging.DEBUG, handlers=handlers)
 
         structlog.configure(
-            processors=[
-                structlog.stdlib.filter_by_level,
-                structlog.stdlib.add_logger_name,
-                structlog.stdlib.add_log_level,
-                structlog.stdlib.PositionalArgumentsFormatter(),
-                structlog.processors.TimeStamper(fmt="iso"),
-                # structlog.processors.StackInfoRenderer(),
-                structlog.processors.UnicodeDecoder(),
-                structlog.processors.CallsiteParameterAdder({
-                    structlog.processors.CallsiteParameter.MODULE,
-                    structlog.processors.CallsiteParameter.FUNC_NAME
-                }),
-                # lambda _, __, event: {
-                #     **event,
-                #     "path": event["module"] + "." + event["func_name"]
-                # },
-                # console_renderer
-                structlog.stdlib.ProcessorFormatter.wrap_for_formatter
-
-            ],
+            processors=self.COMMON_PROCESSORS + [
+                structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
             wrapper_class=structlog.stdlib.BoundLogger,
             logger_factory=structlog.stdlib.LoggerFactory(),
             cache_logger_on_first_use=True,
         )
 
     def get_logger(self):
-        return structlog.stdlib.get_logger()
\ No newline at end of file
+        return structlog.stdlib.get_logger()

From f197a8ef9e6bda5ef50fc942f846c397f9e15207 Mon Sep 17 00:00:00 2001
From: lukvmil
Date: Wed, 3 Dec 2025 17:39:53 -0500
Subject: [PATCH 51/53] version bump -> 1.2.0-beta.9

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 39a83c1..771da72 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "koi-net"
-version = "1.2.0b8"
+version = "1.2.0b9"
 description = "Implementation of KOI-net protocol in Python"
 authors = [
     {name = "Luke Miller", email = "luke@block.science"}

From ea35e67f1966fad44c5494d2635214ed2d519e50 Mon Sep 17 00:00:00 2001
From: lukvmil
Date: Wed, 3 Dec 2025 17:51:43 -0500
Subject: [PATCH 52/53] handshaker bug fix, handshakes when there aren't any neighbors

---
 src/koi_net/behaviors/handshaker.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/koi_net/behaviors/handshaker.py b/src/koi_net/behaviors/handshaker.py
index 58fb278..c546af7 100644
--- a/src/koi_net/behaviors/handshaker.py
+++ b/src/koi_net/behaviors/handshaker.py
@@ -40,7 +40,7 @@ def start(self):
         if self.cache.read(self.config.koi_net.first_contact.rid):
             return
 
-        if not self.graph.get_neighbors(
+        if self.graph.get_neighbors(
             direction="in",
             allowed_type=KoiNetNode):
             return

From 7c2408f368a85cdc2931bf6219ff77e64cbe60d5 Mon Sep 17 00:00:00 2001
From: lukvmil
Date: Wed, 3 Dec 2025 17:52:44 -0500
Subject: [PATCH 53/53] version bump [FULL RELEASE] -> v1.2.0

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 771da72..ad577c8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "koi-net"
-version = "1.2.0b9"
+version = "1.2.0"
 description = "Implementation of KOI-net protocol in Python"
 authors = [
     {name = "Luke Miller", email = "luke@block.science"}
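
A note on how PATCH 50 achieves the uvicorn compatibility named in its subject: the handlers built in log_system.py are installed on the root logger through logging.basicConfig, and each handler's structlog.stdlib.ProcessorFormatter carries foreign_pre_chain=COMMON_PROCESSORS. Records emitted by uvicorn's own loggers (uvicorn.error and uvicorn.access), which are left to propagate to the root logger when the server is run with log_config=None as in the server.py change above, therefore pass through the same console and NDJSON renderers as the node's own structlog events; removing access_log=False re-enables uvicorn's access logging, since it defaults to on. The sketch below illustrates that pattern in isolation; it is not code from this series, and the logger name "demo" is invented for the example, while uvicorn.error is one of uvicorn's standard logger names.

import logging
import sys

import structlog

# Processors shared by structlog-originated events and foreign stdlib records.
shared_processors = [
    structlog.stdlib.add_logger_name,
    structlog.stdlib.add_log_level,
    structlog.processors.TimeStamper(fmt="iso"),
]

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
    structlog.stdlib.ProcessorFormatter(
        processor=structlog.dev.ConsoleRenderer(),
        # Applied to records that did not originate from structlog, so lines
        # from uvicorn's loggers gain the same timestamp/level/logger fields.
        foreign_pre_chain=shared_processors,
    )
)

# Attaching the handler to the root logger catches anything that propagates,
# which uvicorn's loggers do when the server is started with log_config=None.
logging.basicConfig(level=logging.DEBUG, handlers=[handler], force=True)

structlog.configure(
    processors=shared_processors
    + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter],
    wrapper_class=structlog.stdlib.BoundLogger,
    logger_factory=structlog.stdlib.LoggerFactory(),
)

structlog.get_logger("demo").info("structlog event")
logging.getLogger("uvicorn.error").info("foreign stdlib record")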