Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
from baserow.contrib.automation.data_providers.registries import (
automation_data_provider_type_registry,
)
from baserow.contrib.automation.history.models import AutomationNodeResult
from baserow.contrib.automation.history.handler import AutomationHistoryHandler
from baserow.contrib.automation.history.models import (
AutomationNodeHistory,
)
from baserow.contrib.automation.nodes.models import AutomationActionNode
from baserow.contrib.automation.workflows.models import AutomationWorkflow
from baserow.core.cache import local_cache
Expand All @@ -13,12 +16,12 @@


class AutomationDispatchContext(DispatchContext):
own_properties = ["workflow", "event_payload", "history_id"]
own_properties = ["workflow", "event_payload", "history"]

def __init__(
self,
workflow: AutomationWorkflow,
history_id: int,
history: AutomationNodeHistory,
event_payload: Optional[Union[Dict, List[Dict]]] = None,
simulate_until_node: Optional[AutomationActionNode] = None,
current_iterations: Optional[Dict[int, int]] = None,
Expand All @@ -29,7 +32,7 @@ def __init__(
node's changes.

:param workflow: The workflow that this dispatch context is associated with.
:param history_id: The AutomationWorkflowHistory ID from which the
:param history: The AutomationWorkflowHistory from which the
workflow's event payload and node results are derived.
:param event_payload: The event data from the trigger node, if any was
provided, as this is optional.
Expand All @@ -39,7 +42,7 @@ def __init__(
"""

self.workflow = workflow
self.history_id = history_id
self.history = history
self.simulate_until_node = simulate_until_node
self.current_iterations: Dict[int, int] = {}

Expand Down Expand Up @@ -72,36 +75,30 @@ def clone(self, **kwargs):
new_context.current_iterations = {**self.current_iterations}
return new_context

def _get_previous_results_cache_key(self) -> Optional[str]:
return f"wa_previous_nodes_results_{self.history_id}"

def _load_previous_results(self) -> Dict[int, Any]:
def get_iteration_path(self, node):
"""
Returns a dict where keys are the node IDs and values are the results
of the previous_nodes_results.
Compute the current iteration path for the given node.
"""
parent_nodes = node.get_parent_nodes()

results = {}
previous_results = AutomationNodeResult.objects.filter(
node_history__workflow_history_id=self.history_id
).select_related("node_history__node")
for result in previous_results:
results[result.node_history.node_id] = result.result
return ".".join([str(self.current_iterations[p.id]) for p in parent_nodes])

return results
def _get_previous_result_cache_key(self, node) -> Optional[str]:
return f"wa_previous_node_result_{self.history.id}_{node.id}"

@property
def data_provider_registry(self):
    """Return the automation data provider type registry used by this context."""
    return automation_data_provider_type_registry

@property
def previous_nodes_results(self) -> Dict[int, Any]:
if cache_key := self._get_previous_results_cache_key():
return local_cache.get(
cache_key,
lambda: self._load_previous_results(),
)
return {}
def get_previous_node_result(self, node) -> Dict[int, Any]:
    """Fetch (and locally cache) the stored dispatch result of a previous node."""
    # Caching per node is sufficient: the iteration path cannot change
    # within a single dispatch.
    cache_key = self._get_previous_result_cache_key(node)

    def _fetch_result():
        iteration_path = self.get_iteration_path(node)
        return AutomationHistoryHandler().get_node_result(
            self.history, node, iteration_path
        )

    return local_cache.get(cache_key, _fetch_result)

def get_timezone_name(self) -> str:
"""
Expand All @@ -120,17 +117,21 @@ def sortings(self) -> Optional[str]:
def filters(self) -> Optional[str]:
return None

@property
def is_publicly_sortable(self) -> bool:
    """Automation dispatch contexts never expose public sorting."""
    return False

@property
def is_publicly_filterable(self) -> bool:
    """Automation dispatch contexts never expose public filtering."""
    return False

@property
def is_publicly_searchable(self) -> bool:
    """Automation dispatch contexts never expose public search."""
    return False

@property
def public_allowed_properties(self) -> Optional[Dict[str, Dict[int, List[str]]]]:
return {}
return None

def search_query(self) -> Optional[str]:
return None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@
from baserow.contrib.automation.automation_dispatch_context import (
AutomationDispatchContext,
)
from baserow.contrib.automation.history.exceptions import (
AutomationWorkflowHistoryNodeResultDoesNotExist,
)
from baserow.contrib.automation.nodes.exceptions import AutomationNodeDoesNotExist
from baserow.contrib.automation.nodes.handler import AutomationNodeHandler
from baserow.core.formula.exceptions import InvalidFormulaContext
Expand Down Expand Up @@ -33,10 +36,10 @@ def get_data_chunk(
raise InvalidFormulaContext(message) from exc

try:
previous_node_results = dispatch_context.previous_nodes_results[
int(previous_node.id)
]
except KeyError as exc:
previous_node_result = dispatch_context.get_previous_node_result(
previous_node
)
except AutomationWorkflowHistoryNodeResultDoesNotExist as exc:
message = (
"The previous node id is not present in the dispatch context results"
)
Expand All @@ -45,7 +48,7 @@ def get_data_chunk(
service = previous_node.service.specific

if service.get_type().returns_list:
previous_node_results = previous_node_results["results"]
previous_node_result = previous_node_result["results"]
if len(rest) >= 2:
prepared_path = [
rest[0],
Expand All @@ -56,7 +59,7 @@ def get_data_chunk(
else:
prepared_path = service.get_type().prepare_value_path(service, rest)

return get_value_at_path(previous_node_results, prepared_path)
return get_value_at_path(previous_node_result, prepared_path)

def import_path(self, path, id_mapping, **kwargs):
"""
Expand Down Expand Up @@ -99,9 +102,7 @@ def get_data_chunk(
raise InvalidFormulaContext(message) from exc

try:
parent_node_results = dispatch_context.previous_nodes_results[
parent_node.id
]
parent_node_result = dispatch_context.get_previous_node_result(parent_node)
except KeyError as exc:
message = (
"The parent node id is not present in the dispatch context results"
Expand All @@ -116,7 +117,7 @@ def get_data_chunk(
)
raise InvalidFormulaContext(message) from exc

current_item = parent_node_results["results"][current_iteration]
current_item = parent_node_result["results"][current_iteration]
data = {"index": current_iteration, "item": current_item}

return get_value_at_path(data, rest)
Expand Down
4 changes: 4 additions & 0 deletions backend/src/baserow/contrib/automation/history/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,7 @@ def __init__(self, history_id=None, *args, **kwargs):
*args,
**kwargs,
)


class AutomationWorkflowHistoryNodeResultDoesNotExist(AutomationWorkflowHistoryError):
    """Raised when no stored result entry exists for the given node/history pair."""
21 changes: 19 additions & 2 deletions backend/src/baserow/contrib/automation/history/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from baserow.contrib.automation.history.constants import HistoryStatusChoices
from baserow.contrib.automation.history.exceptions import (
AutomationWorkflowHistoryDoesNotExist,
AutomationWorkflowHistoryNodeResultDoesNotExist,
)
from baserow.contrib.automation.history.models import (
AutomationNodeHistory,
Expand Down Expand Up @@ -105,13 +106,29 @@ def create_node_result(
self,
node_history: AutomationNodeHistory,
result: Optional[Union[Dict, List[Dict]]] = None,
iteration: int = 0,
iteration_path: str = "",
) -> AutomationNodeResult:
"""Saves the result of a Node dispatch."""

result = result if result else {}
return AutomationNodeResult.objects.create(
node_history=node_history,
iteration=iteration,
iteration_path=iteration_path,
result=result,
)

def get_node_result(self, history, node, iteration_path):
    """
    Return the stored dispatch result for the given history/node/iteration_path.

    :param history: The workflow history the result belongs to.
    :param node: The node whose dispatch result should be fetched.
    :param iteration_path: The iteration path that produced the result.
    :raises AutomationWorkflowHistoryNodeResultDoesNotExist: When no result
        entry exists for this history/node/iteration_path combination.
    :return: The stored result of the node dispatch.
    """

    try:
        # `only("result")` avoids fetching columns we don't read.
        node_result = AutomationNodeResult.objects.only("result").get(
            node_history__workflow_history_id=history.id,
            node_history__node_id=node.id,
            iteration_path=iteration_path,
        )
    except AutomationNodeResult.DoesNotExist as exc:
        # Chain the original ORM exception so the underlying lookup failure
        # remains visible when debugging.
        raise AutomationWorkflowHistoryNodeResultDoesNotExist() from exc

    return node_result.result
10 changes: 6 additions & 4 deletions backend/src/baserow/contrib/automation/history/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,6 @@ class AutomationHistory(models.Model):

message = models.TextField()

is_test_run = models.BooleanField(
db_default=False
) # TODO ZDM: Remove after next release

status = models.CharField(
choices=HistoryStatusChoices.choices,
max_length=8,
Expand Down Expand Up @@ -78,6 +74,12 @@ class AutomationNodeResult(models.Model):
iteration = models.PositiveIntegerField(
db_default=0,
help_text="Keeps track of the current iteration of the Iterator node.",
) # TODO ZDM: Remove after next release

iteration_path = models.CharField(
db_default="",
default="",
help_text="Keeps track of the iteration path that generated the result.",
)

result = models.JSONField(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Generated by Django 5.2.12 on 2026-03-19 10:23

from django.db import migrations, models
from django.db.models import CharField
from django.db.models.functions import Cast


def populate_iteration_path(apps, schema_editor):
    """Backfill iteration_path with the stringified legacy iteration value."""
    node_result_model = apps.get_model("automation", "AutomationNodeResult")
    node_result_model.objects.update(
        iteration_path=Cast("iteration", output_field=CharField())
    )


class Migration(migrations.Migration):
    # Introduces AutomationNodeResult.iteration_path, backfills it from the
    # legacy integer `iteration` column, and drops the `is_test_run` flag.

    dependencies = [
        ('automation', '0024_automationworkflowhistory_event_payload_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='automationnoderesult',
            name='iteration_path',
            field=models.CharField(db_default='', default='', help_text='Keeps track of the iteration path that generated the result.'),
        ),
        # Backfill existing rows; the reverse is a deliberate no-op.
        migrations.RunPython(populate_iteration_path, migrations.RunPython.noop),
        # NOTE(review): the field is removed from `automationnodehistory` here —
        # confirm this matches the model that actually declares `is_test_run`.
        migrations.RemoveField(
            model_name='automationnodehistory',
            name='is_test_run',
        ),
    ]
10 changes: 2 additions & 8 deletions backend/src/baserow/contrib/automation/nodes/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,7 +449,7 @@ def dispatch_node(

dispatch_context = AutomationDispatchContext(
node.workflow,
history_id,
workflow_history,
event_payload=workflow_history.event_payload,
simulate_until_node=workflow_history.simulate_until_node,
current_iterations=current_iterations,
Expand Down Expand Up @@ -485,12 +485,6 @@ def dispatch_node(
self._handle_simulation_notify(simulate_until_node, node)
return None

iteration_index = 0
parent_nodes = node.get_parent_nodes()
if parent_nodes:
# Use the normalized iteration index from the context.
iteration_index = dispatch_context.current_iterations[parent_nodes[-1].id]

# Return early if this is a simulation as we've reached the
# simulated node.
if self._handle_simulation_notify(simulate_until_node, node):
Expand All @@ -499,7 +493,7 @@ def dispatch_node(
history_handler.create_node_result(
node_history=node_history,
result=dispatch_result.data,
iteration=iteration_index,
iteration_path=dispatch_context.get_iteration_path(node),
)

to_chain = []
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -768,7 +768,7 @@ def toggle_test_run(
# This is a placeholder value, no actual history exists yet
# (it's created later in start_workflow). This is fine
# for now, because get_sample_data() doesn't use history.
history_id=0,
history=None,
simulate_until_node=simulate_until_node,
)
if workflow.can_immediately_be_tested() or (
Expand Down
5 changes: 4 additions & 1 deletion backend/src/baserow/contrib/database/api/views/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -2122,11 +2122,14 @@ def get(self, request: Request, slug: str) -> Response:
raise ViewDoesNotExist()

field_options = view_type.get_visible_field_options_in_order(view_specific)
ordered_field_ids = list(field_options.values_list("field_id", flat=True))
fields = specific_iterator(
Field.objects.filter(id__in=field_options.values_list("field_id"))
Field.objects.filter(id__in=ordered_field_ids)
.select_related("content_type")
.prefetch_related("select_options")
)
field_id_order = {fid: idx for idx, fid in enumerate(ordered_field_ids)}
fields = sorted(fields, key=lambda f: field_id_order.get(f.id, 0))

return Response(
PublicViewInfoSerializer(
Expand Down
5 changes: 4 additions & 1 deletion backend/src/baserow/contrib/database/views/view_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,9 +263,12 @@ def after_fields_type_change(self, fields):
)

def get_visible_field_options_in_order(self, grid_view):
    """Return ordered field options that are visible or used by a group by."""
    grouped_field_ids = grid_view.viewgroupby_set.values_list(
        "field_id", flat=True
    )
    # A field used for grouping must be included even when it is hidden.
    visible_or_grouped = Q(hidden=False) | Q(field_id__in=grouped_field_ids)
    field_options = grid_view.get_field_options(create_if_missing=True)
    return field_options.filter(visible_or_grouped).order_by(
        "-field__primary", "order", "field__id"
    )

Expand Down
17 changes: 17 additions & 0 deletions backend/src/baserow/contrib/database/ws/public/views/signals.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,23 @@ def public_view_filter_deleted(sender, view_filter_id, view_filter, user, **kwar
_send_force_rows_refresh_if_view_public(view_filter.view)


@receiver(view_signals.view_group_by_created)
def public_view_group_by_created(sender, view_group_by, user, **kwargs):
    """Force a refresh of the public view's clients when a group by is created."""
    _send_force_view_refresh_if_view_public(view_group_by.view)


@receiver(view_signals.view_group_by_updated)
def public_view_group_by_updated(sender, view_group_by, user, **kwargs):
    """Force a refresh of the public view's clients when a group by is updated."""
    _send_force_view_refresh_if_view_public(view_group_by.view)


@receiver(view_signals.view_group_by_deleted)
def public_view_group_by_deleted(
    sender, view_group_by_id, view_group_by, user, **kwargs
):
    """Force a refresh of the public view's clients when a group by is deleted."""
    _send_force_view_refresh_if_view_public(view_group_by.view)


@receiver(view_signals.view_field_options_updated)
def public_view_field_options_updated(sender, view, user, **kwargs):
_send_force_view_refresh_if_view_public(view)
Loading
Loading