Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 14 additions & 2 deletions server/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -786,6 +786,10 @@ def process_plugin_events(db, plugin, plugEventsArr):
pluginEvents[index].status = "watched-not-changed"
index += 1

# Track objects whose state actually changed this cycle
# (only these will be recorded in Plugins_History)
changed_this_cycle = set()

# Loop thru events and check if previously available objects are missing
for tmpObj in pluginObjects:
isMissing = True
Expand All @@ -799,6 +803,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
if tmpObj.status != "missing-in-last-scan":
tmpObj.changed = timeNowUTC()
tmpObj.status = "missing-in-last-scan"
changed_this_cycle.add(tmpObj.idsHash)
# mylog('debug', [f'[Plugins] Missing from last scan (PrimaryID | SecondaryID): {tmpObj.primaryId} | {tmpObj.secondaryId}'])

# Merge existing plugin objects with newly discovered ones and update existing ones with new values
Expand All @@ -807,6 +812,7 @@ def process_plugin_events(db, plugin, plugEventsArr):
if tmpObjFromEvent.status == "not-processed":
# This is a new object as it was not discovered as "exists" previously
tmpObjFromEvent.status = "new"
changed_this_cycle.add(tmpObjFromEvent.idsHash)

pluginObjects.append(tmpObjFromEvent)
# update data of existing objects
Expand All @@ -815,6 +821,11 @@ def process_plugin_events(db, plugin, plugEventsArr):
for plugObj in pluginObjects:
# find corresponding object for the event and merge
if plugObj.idsHash == tmpObjFromEvent.idsHash:
if (
plugObj.status == "missing-in-last-scan"
or tmpObjFromEvent.status == "watched-changed"
):
changed_this_cycle.add(tmpObjFromEvent.idsHash)
pluginObjects[index] = combine_plugin_objects(
plugObj, tmpObjFromEvent
)
Expand Down Expand Up @@ -871,8 +882,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
if plugObj.status in statuses_to_report_on:
events_to_insert.append(values)

# combine all DB insert and update events into one for history
history_to_insert.append(values)
# Only record history for objects that actually changed this cycle
if plugObj.idsHash in changed_this_cycle:
history_to_insert.append(values)

mylog("debug", f"[Plugins] pluginEvents count: {len(pluginEvents)}")
mylog("debug", f"[Plugins] pluginObjects count: {len(pluginObjects)}")
Expand Down
199 changes: 199 additions & 0 deletions test/db_test_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import sys, os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from db_test_helpers import make_db, insert_device, minutes_ago, DummyDB, down_event_macs, make_device_dict, sync_insert_devices
from db_test_helpers import make_plugin_db, make_plugin_dict, make_plugin_event_row, seed_plugin_object, plugin_history_rows, plugin_objects_rows, PluginFakeDB
"""

import sqlite3
Expand Down Expand Up @@ -351,3 +352,201 @@ def __init__(self, conn: sqlite3.Connection):

def commitDB(self) -> None:
self._conn.commit()


# ---------------------------------------------------------------------------
# Plugin tables DDL & helpers (used by test/server/test_plugin_history_filtering.py)
# ---------------------------------------------------------------------------

# All three plugin tables share an identical column layout.  Build their DDL
# from a single template so the schemas cannot drift apart when edited.
_PLUGIN_TABLE_DDL_TEMPLATE = """
CREATE TABLE IF NOT EXISTS {table}(
    "index" INTEGER PRIMARY KEY AUTOINCREMENT,
    plugin TEXT NOT NULL,
    objectPrimaryId TEXT NOT NULL,
    objectSecondaryId TEXT NOT NULL,
    dateTimeCreated TEXT NOT NULL,
    dateTimeChanged TEXT NOT NULL,
    watchedValue1 TEXT NOT NULL,
    watchedValue2 TEXT NOT NULL,
    watchedValue3 TEXT NOT NULL,
    watchedValue4 TEXT NOT NULL,
    "status" TEXT NOT NULL,
    extra TEXT NOT NULL,
    userData TEXT NOT NULL,
    foreignKey TEXT NOT NULL,
    syncHubNodeName TEXT,
    helpVal1 TEXT,
    helpVal2 TEXT,
    helpVal3 TEXT,
    helpVal4 TEXT,
    objectGuid TEXT
);
"""

# Public DDL strings, one per plugin table (same names as before so existing
# imports keep working).
CREATE_PLUGINS_OBJECTS = _PLUGIN_TABLE_DDL_TEMPLATE.format(table="Plugins_Objects")
CREATE_PLUGINS_EVENTS = _PLUGIN_TABLE_DDL_TEMPLATE.format(table="Plugins_Events")
CREATE_PLUGINS_HISTORY = _PLUGIN_TABLE_DDL_TEMPLATE.format(table="Plugins_History")


class PluginFakeSQL:
    """Thin wrapper around a sqlite3.Cursor exposing the execute interface
    that plugin.py expects."""

    def __init__(self, cursor):
        self._cursor = cursor

    def execute(self, sql, params=None):
        # Forward without a params argument when none were supplied so
        # parameterless statements behave exactly like sqlite3's own API.
        if not params:
            return self._cursor.execute(sql)
        return self._cursor.execute(sql, params)

    def executemany(self, sql, params_list):
        # Delegate straight through to the underlying cursor.
        return self._cursor.executemany(sql, params_list)


class PluginFakeDB:
    """Minimal DB facade expected by process_plugin_events."""

    def __init__(self, conn):
        # Expose both the raw connection and a cursor wrapper, mirroring
        # the attribute names the production DB object provides.
        self.sql_connection = conn
        self.sql = PluginFakeSQL(conn.cursor())

    def get_sql_array(self, query):
        """Run *query* on a fresh cursor and return all rows."""
        cursor = self.sql_connection.cursor()
        return cursor.execute(query).fetchall()

    def commitDB(self):
        """Commit any pending transaction on the underlying connection."""
        self.sql_connection.commit()


def make_plugin_db() -> tuple:
    """
    Return a (PluginFakeDB, connection) backed by an in-memory SQLite
    database with all three plugin tables created.
    """
    conn = sqlite3.connect(":memory:")
    schema = "".join(
        (CREATE_PLUGINS_OBJECTS, CREATE_PLUGINS_EVENTS, CREATE_PLUGINS_HISTORY)
    )
    conn.executescript(schema)
    conn.commit()
    return PluginFakeDB(conn), conn


def make_plugin_dict(prefix: str, watched_columns=None) -> dict:
    """Return a minimal plugin dict compatible with process_plugin_events."""
    # Default to watching the first value column when none are specified.
    watch_setting = {
        "function": "WATCH",
        "value": watched_columns if watched_columns else ["watchedValue1"],
    }
    return {"unique_prefix": prefix, "settings": [watch_setting]}


def make_plugin_event_row(prefix: str, primary_id: str, secondary_id="sec",
                          watched1="val1", watched2="", watched3="",
                          watched4="", changed="2026-01-01 00:00:00",
                          extra="", user_data="", foreign_key="",
                          status="not-processed"):
    """
    Build a tuple mimicking a raw plugin output row.

    The tuple has 19 elements: a placeholder "index" followed by 18 data
    columns. `changed` is reused for both dateTimeCreated and
    dateTimeChanged. NOTE(review): unlike the Plugins_* table DDL, no
    objectGuid element is included — confirm this matches the row arity
    process_plugin_events expects.
    """
    return (
        0,             # index (placeholder, not used for events)
        prefix,        # plugin
        primary_id,    # objectPrimaryId
        secondary_id,  # objectSecondaryId
        changed,       # dateTimeCreated
        changed,       # dateTimeChanged
        watched1,
        watched2,
        watched3,
        watched4,
        status,
        extra,
        user_data,
        foreign_key,
        None,          # syncHubNodeName
        None,          # helpVal1
        None,          # helpVal2
        None,          # helpVal3
        None,          # helpVal4
    )


def seed_plugin_object(cur, prefix: str, primary_id: str,
                       secondary_id="sec", watched1="val1",
                       status="watched-not-changed",
                       changed="2026-01-01 00:00:00"):
    """Insert a row into Plugins_Objects to simulate a pre-existing object."""
    # Columns not relevant to the tests are filled with empty strings;
    # `changed` doubles as both the created and changed timestamps.
    insert_sql = """INSERT INTO Plugins_Objects
           (plugin, objectPrimaryId, objectSecondaryId, dateTimeCreated,
            dateTimeChanged, watchedValue1, watchedValue2, watchedValue3,
            watchedValue4, status, extra, userData, foreignKey)
           VALUES (?, ?, ?, ?, ?, ?, '', '', '', ?, '', '', '')"""
    row_values = (prefix, primary_id, secondary_id, changed, changed,
                  watched1, status)
    cur.execute(insert_sql, row_values)


def plugin_history_rows(conn, prefix: str):
    """Return all Plugins_History rows for a given plugin prefix."""
    query = "SELECT * FROM Plugins_History WHERE plugin = ?"
    return conn.cursor().execute(query, (prefix,)).fetchall()


def plugin_objects_rows(conn, prefix: str):
    """Return all Plugins_Objects rows for a given plugin prefix."""
    query = "SELECT * FROM Plugins_Objects WHERE plugin = ?"
    return conn.cursor().execute(query, (prefix,)).fetchall()
Loading
Loading