From bbe77153ffed17786627d8f8cdf192d21ec8b83e Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 09:56:13 +0100 Subject: [PATCH 01/58] Create fetcher utility Signed-off-by: Assem Hafez --- .../get-workflow-history.types.ts | 8 + .../workflow-history-multi-page-fixture.ts | 42 +++ .../workflow-history-fetcher.test.tsx | 352 ++++++++++++++++++ .../helpers/workflow-history-fetcher.ts | 150 ++++++++ .../helpers/workflow-history-fetcher.types.ts | 24 ++ 5 files changed, 576 insertions(+) create mode 100644 src/views/workflow-history/__fixtures__/workflow-history-multi-page-fixture.ts create mode 100644 src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx create mode 100644 src/views/workflow-history/helpers/workflow-history-fetcher.ts create mode 100644 src/views/workflow-history/helpers/workflow-history-fetcher.types.ts diff --git a/src/route-handlers/get-workflow-history/get-workflow-history.types.ts b/src/route-handlers/get-workflow-history/get-workflow-history.types.ts index 811af96ea..fcc6f14d7 100644 --- a/src/route-handlers/get-workflow-history/get-workflow-history.types.ts +++ b/src/route-handlers/get-workflow-history/get-workflow-history.types.ts @@ -1,6 +1,10 @@ +import { type z } from 'zod'; + import { type GetWorkflowExecutionHistoryResponse } from '@/__generated__/proto-ts/uber/cadence/api/v1/GetWorkflowExecutionHistoryResponse'; import { type DefaultMiddlewaresContext } from '@/utils/route-handlers-middleware'; +import type getWorkflowHistoryQueryParamsSchema from './schemas/get-workflow-history-query-params-schema'; + export type RouteParams = { domain: string; cluster: string; @@ -12,6 +16,10 @@ export type RequestParams = { params: RouteParams; }; +export type WorkflowHistoryQueryParams = z.infer< + typeof getWorkflowHistoryQueryParamsSchema +>; + export type GetWorkflowHistoryResponse = GetWorkflowExecutionHistoryResponse; export type Context = DefaultMiddlewaresContext; diff --git 
a/src/views/workflow-history/__fixtures__/workflow-history-multi-page-fixture.ts b/src/views/workflow-history/__fixtures__/workflow-history-multi-page-fixture.ts new file mode 100644 index 000000000..132720796 --- /dev/null +++ b/src/views/workflow-history/__fixtures__/workflow-history-multi-page-fixture.ts @@ -0,0 +1,42 @@ +import { type GetWorkflowHistoryResponse } from '@/route-handlers/get-workflow-history/get-workflow-history.types'; + +import { + scheduleActivityTaskEvent, + startActivityTaskEvent, + completeActivityTaskEvent, +} from './workflow-history-activity-events'; +import { + completeDecisionTaskEvent, + scheduleDecisionTaskEvent, + startDecisionTaskEvent, +} from './workflow-history-decision-events'; + +/** + * Multi-page workflow history fixture for testing pagination + * Contains 3 pages with various events + */ +const workflowHistoryMultiPageFixture: GetWorkflowHistoryResponse[] = [ + // Page 1: Activity task scheduled and started + { + history: { events: [scheduleActivityTaskEvent, startActivityTaskEvent] }, + rawHistory: [], + archived: false, + nextPageToken: 'page2', + }, + // Page 2: Activity completed and decision task scheduled + { + history: { events: [completeActivityTaskEvent, scheduleDecisionTaskEvent] }, + rawHistory: [], + archived: false, + nextPageToken: 'page3', + }, + // Page 3: Decision task started and completed (last page) + { + history: { events: [startDecisionTaskEvent, completeDecisionTaskEvent] }, + rawHistory: [], + archived: false, + nextPageToken: '', + }, +]; + +export default workflowHistoryMultiPageFixture; diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx new file mode 100644 index 000000000..200a9ebd3 --- /dev/null +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx @@ -0,0 +1,352 @@ +import { QueryClient } from '@tanstack/react-query'; +import { HttpResponse } 
from 'msw'; + +import { waitFor } from '@/test-utils/rtl'; + +import { type GetWorkflowHistoryResponse } from '@/route-handlers/get-workflow-history/get-workflow-history.types'; +import mswMockEndpoints from '@/test-utils/msw-mock-handlers/helper/msw-mock-endpoints'; + +import workflowHistoryMultiPageFixture from '../../__fixtures__/workflow-history-multi-page-fixture'; +import WorkflowHistoryFetcher from '../workflow-history-fetcher'; + +describe(WorkflowHistoryFetcher.name, () => { + let queryClient: QueryClient; + + beforeEach(() => { + queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + staleTime: Infinity, + refetchOnWindowFocus: false, + }, + }, + }); + }); + + afterEach(() => { + queryClient.clear(); + }); + + it('should return the current query state from getCurrentState', async () => { + const { fetcher } = setup(queryClient); + + const initialState = fetcher.getCurrentState(); + expect(initialState.data).toBeUndefined(); + expect(initialState.status).toBe('pending'); + + fetcher.unmount(); + }); + + it('should call onChange callback on state changes', async () => { + const { fetcher } = setup(queryClient); + const callback = jest.fn(); + + fetcher.onChange(callback); + const initialCallCount = callback.mock.calls.length; + + fetcher.start((state) => !state?.data?.pages?.length); + + await waitFor(() => { + expect(callback.mock.calls.length).toBeGreaterThan(initialCallCount); + }); + fetcher.unmount(); + }); + + it('should return unsubscribe function', async () => { + const { fetcher } = setup(queryClient); + const callback1 = jest.fn(); + const callback2 = jest.fn(); + + const unsubscribe1 = fetcher.onChange(callback1); + fetcher.onChange(callback2); + + fetcher.start((state) => !state?.data?.pages?.length); + + await waitFor(() => { + expect(callback1.mock.calls.length).toEqual(callback2.mock.calls.length); + expect(callback1.mock.calls.length).toBeGreaterThan(1); + }); + + const countBeforeUnsubscribe = 
callback1.mock.calls.length; + unsubscribe1(); + + fetcher.fetchSingleNextPage(); + + await waitFor(() => { + expect(callback2.mock.calls.length).toBeGreaterThan( + countBeforeUnsubscribe + ); + }); + + fetcher.unmount(); + }); + + it('should respect shouldContinue callback', async () => { + const { fetcher } = setup(queryClient); + const shouldContinue = jest.fn(() => false); + + fetcher.start(shouldContinue); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.isFetching).toBe(false); + }); + + const state = fetcher.getCurrentState(); + expect(state.data?.pages || []).toHaveLength(0); + + fetcher.unmount(); + }); + + it('should stop after shouldContinue returns false', async () => { + const { fetcher } = setup(queryClient); + const shouldContinue = jest.fn((state) => { + return (state.data?.pages.length || 0) < 2; + }); + + fetcher.start(shouldContinue); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.isFetching).toBe(false); + expect(state.data?.pages).toHaveLength(2); + }); + + fetcher.unmount(); + }); + + it('should load all pages and auto-stop when there are no more pages', async () => { + const { fetcher } = setup(queryClient); + + fetcher.start(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.hasNextPage).toBe(false); + expect(state.data?.pages).toHaveLength(3); + }); + + fetcher.unmount(); + }); + + it('should auto-stop on error after initial success', async () => { + jest.useFakeTimers(); + + try { + const { fetcher } = setup(queryClient, { failOnPages: [2] }); + + fetcher.start(); + + // Wait for first page to load successfully + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(1); + }); + + // Fast-forward through retry delays (3 retries * 3000ms each) + await jest.advanceTimersByTimeAsync(3 * 3000); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + 
expect(state.isFetching).toBe(false); + expect(state.isError).toBe(true); + expect(state.data?.pages).toHaveLength(1); + }); + + fetcher.unmount(); + } finally { + jest.useRealTimers(); + } + }); + + it('should allow manual stop for loading all pages', async () => { + const { fetcher } = setup(queryClient); + + let stopped = false; + fetcher.onChange((state) => { + if (state.data?.pages.length === 1 && !stopped) { + stopped = true; + fetcher.stop(); + } + }); + + fetcher.start(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.isFetching).toBe(false); + expect(state.data?.pages).toHaveLength(1); + }); + + fetcher.unmount(); + }); + + it('should allow start again after stop', async () => { + const { fetcher } = setup(queryClient); + + let stopped = false; + fetcher.onChange((state) => { + if (state.data?.pages.length === 1 && !stopped) { + stopped = true; + fetcher.stop(); + } + }); + + fetcher.start(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.isFetching).toBe(false); + expect(state.data?.pages).toHaveLength(1); + }); + + fetcher.start(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.isFetching).toBe(false); + }); + + const finalState = fetcher.getCurrentState(); + expect(finalState.data?.pages).toHaveLength(3); + fetcher.unmount(); + }); + + it('should fetch next page when available', async () => { + const { fetcher } = setup(queryClient); + + fetcher.start((state) => !state?.data?.pages?.length); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(1); + }); + fetcher.stop(); + + fetcher.fetchSingleNextPage(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(2); + }); + + fetcher.unmount(); + }); + + it('should not fetch when already fetching', async () => { + const { fetcher } = setup(queryClient); + + fetcher.start((state) => 
!state?.data?.pages?.length); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(1); + }); + fetcher.stop(); + + // fetching twice should not fetch again + fetcher.fetchSingleNextPage(); + fetcher.fetchSingleNextPage(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(!state.isFetchingNextPage).toBe(true); + }); + + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(2); + + fetcher.unmount(); + }); + + it('should not fetch when no next page available', async () => { + const { fetcher } = setup(queryClient); + + fetcher.start(); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.hasNextPage).toBe(false); + }); + + const pageCountBefore = fetcher.getCurrentState().data?.pages.length; + fetcher.fetchSingleNextPage(); + + const state = fetcher.getCurrentState(); + expect(state.data?.pages.length).toBe(pageCountBefore); + fetcher.unmount(); + }); +}); + +function setup(client: QueryClient, options: { failOnPages?: number[] } = {}) { + const params = { + domain: 'test-domain', + cluster: 'test-cluster', + workflowId: 'test-workflow-id', + runId: 'test-run-id', + pageSize: 10, + }; + + mockHistoryEndpoint(workflowHistoryMultiPageFixture, options.failOnPages); + + const fetcher = new WorkflowHistoryFetcher(client, params); + + const waitForData = async () => { + let unsubscribe: (() => void) | undefined; + await new Promise((resolve) => { + unsubscribe = fetcher.onChange((state) => { + if (state.data !== undefined) { + resolve(); + } + }); + }); + unsubscribe?.(); + }; + return { + fetcher, + params, + waitForData, + }; +} + +function mockHistoryEndpoint( + responses: GetWorkflowHistoryResponse[], + failOnPages: number[] = [] +) { + mswMockEndpoints([ + { + path: '/api/domains/:domain/:cluster/workflows/:workflowId/:runId/history', + httpMethod: 'GET', + mockOnce: false, // Persist across multiple requests + httpResolver: 
async ({ request }) => { + const url = new URL(request.url); + const nextPage = url.searchParams.get('nextPage'); + + // Determine current page number based on nextPage param + let pageNumber = 1; + if (!nextPage || nextPage === 'null' || nextPage === 'undefined') { + pageNumber = 1; + } else if (nextPage === 'page2') { + pageNumber = 2; + } else if (nextPage === 'page3') { + pageNumber = 3; + } + + // Check if this page should fail + if (failOnPages.includes(pageNumber)) { + return HttpResponse.json( + { message: 'Request failed' }, + { status: 500 } + ); + } + + // Map page number to response index (0-indexed) + const responseIndex = pageNumber - 1; + const response = + responses[responseIndex] || responses[responses.length - 1]; + return HttpResponse.json(response); + }, + }, + ]); +} diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts new file mode 100644 index 000000000..cef2493cf --- /dev/null +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -0,0 +1,150 @@ +import { InfiniteQueryObserver, type QueryClient } from '@tanstack/react-query'; +import queryString from 'query-string'; + +import { + type WorkflowHistoryQueryParams, + type GetWorkflowHistoryResponse, +} from '@/route-handlers/get-workflow-history/get-workflow-history.types'; +import request from '@/utils/request'; +import { type RequestError } from '@/utils/request/request-error'; + +import { + type WorkflowHistoryQueryResult, + type QueryResultOnChangeCallback, + type ShouldContinueCallback, + type WorkflowHistoryQueryKey, +} from './workflow-history-fetcher.types'; + +export default class WorkflowHistoryFetcher { + private observer: InfiniteQueryObserver< + GetWorkflowHistoryResponse, + RequestError + >; + + private unsubscribe: (() => void) | null = null; + private isStarted = false; + private shouldContinue: ShouldContinueCallback = () => true; + + constructor( + private readonly 
queryClient: QueryClient, + private readonly params: WorkflowHistoryQueryParams + ) { + this.observer = new InfiniteQueryObserver< + GetWorkflowHistoryResponse, + RequestError + >(this.queryClient, { + ...this.buildObserverOptions(this.params), + }); + } + + onChange(callback: QueryResultOnChangeCallback): () => void { + const current = this.getCurrentState(); + if (current) callback(current); + return this.observer.subscribe((res: any) => { + callback(res); + }); + } + + start(shouldContinue: ShouldContinueCallback = () => true): void { + if (shouldContinue) { + this.shouldContinue = shouldContinue; + } + // If already started, return + if (this.isStarted) return; + this.isStarted = true; + let emitCount = 0; + const currentState = this.observer.getCurrentResult(); + const fetchedFirstPage = currentState.status !== 'pending'; + const shouldEnableQuery = + (!fetchedFirstPage && shouldContinue(currentState)) || fetchedFirstPage; + + if (shouldEnableQuery) { + this.observer.setOptions({ + ...this.buildObserverOptions(this.params), + enabled: true, + }); + } + + const emit = (res: WorkflowHistoryQueryResult) => { + emitCount++; + + // Auto stop when there are no more pages (end of history) or when there is a fresh error happens after the start. + // isError is true when the request failes and retries are exhausted. 
+ if (res.hasNextPage === false || (res.isError && emitCount > 1)) { + this.stop(); + return; + } + + // Drive pagination based on external predicate + if (this.shouldContinue(res) && !res.isFetchingNextPage) { + res.fetchNextPage(); + } + }; + + // only start emit (fetching next pages) after the initial fetch is complete + // first page is already fetched on the first subscription below + if (fetchedFirstPage) { + emit(currentState); + } + + if (this.unsubscribe) { + this.unsubscribe(); + } + this.unsubscribe = this.observer.subscribe((res) => emit(res)); + } + + stop(): void { + this.isStarted = false; + if (this.unsubscribe) { + this.unsubscribe(); + this.unsubscribe = null; + } + } + unmount(): void { + this.stop(); + this.observer.destroy(); + } + + fetchSingleNextPage(): void { + const state = this.getCurrentState(); + + if (state.status === 'pending') { + this.observer.setOptions({ + ...this.buildObserverOptions(this.params), + enabled: true, + }); + } else if (!state.isFetchingNextPage && state.hasNextPage) + state.fetchNextPage(); + } + + getCurrentState(): WorkflowHistoryQueryResult { + return this.observer.getCurrentResult(); + } + + private buildObserverOptions(params: WorkflowHistoryQueryParams) { + return { + queryKey: [ + 'workflow_history_paginated', + params, + ] satisfies WorkflowHistoryQueryKey, + queryFn: ({ queryKey: [_, qp], pageParam }: any) => + request( + queryString.stringifyUrl({ + url: `/api/domains/${qp.domain}/${qp.cluster}/workflows/${qp.workflowId}/${qp.runId}/history`, + query: { + nextPage: pageParam, + pageSize: qp.pageSize, + waitForNewEvent: qp.waitForNewEvent ?? false, + } satisfies WorkflowHistoryQueryParams, + }) + ).then((res) => res.json()), + initialPageParam: undefined, + getNextPageParam: (lastPage: GetWorkflowHistoryResponse) => { + return lastPage.nextPageToken ? 
lastPage.nextPageToken : undefined; + }, + retry: 3, + retryDelay: 3000, + enabled: false, + }; + } +} diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts new file mode 100644 index 000000000..457f8b37b --- /dev/null +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts @@ -0,0 +1,24 @@ +import { + type InfiniteData, + type InfiniteQueryObserverResult, +} from '@tanstack/react-query'; + +import { + type WorkflowHistoryQueryParams, + type GetWorkflowHistoryResponse, +} from '@/route-handlers/get-workflow-history/get-workflow-history.types'; +import { type RequestError } from '@/utils/request/request-error'; + +export type WorkflowHistoryQueryKey = [string, WorkflowHistoryQueryParams]; + +export type WorkflowHistoryQueryResult = InfiniteQueryObserverResult< + InfiniteData, + RequestError +>; +export type QueryResultOnChangeCallback = ( + state: WorkflowHistoryQueryResult +) => void; + +export type ShouldContinueCallback = ( + state: WorkflowHistoryQueryResult +) => boolean; From 6b099458feb93feafa4cf6bdd10018946079591c Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 10:26:30 +0100 Subject: [PATCH 02/58] rename query Signed-off-by: Assem Hafez --- .../workflow-history/helpers/workflow-history-fetcher.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index cef2493cf..4b97cf89d 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -123,10 +123,7 @@ export default class WorkflowHistoryFetcher { private buildObserverOptions(params: WorkflowHistoryQueryParams) { return { - queryKey: [ - 'workflow_history_paginated', - params, - ] satisfies WorkflowHistoryQueryKey, + queryKey: ['workflow_history', 
params] satisfies WorkflowHistoryQueryKey, queryFn: ({ queryKey: [_, qp], pageParam }: any) => request( queryString.stringifyUrl({ From 1b5796af04c35734f5e69558438ddd1fbddf4e77 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 11:41:28 +0100 Subject: [PATCH 03/58] Create hook for fetching history Signed-off-by: Assem Hafez --- .../use-workflow-history-fetcher.test.tsx | 196 ++++++++++++++++++ .../hooks/use-workflow-history-fetcher.ts | 87 ++++++++ 2 files changed, 283 insertions(+) create mode 100644 src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx create mode 100644 src/views/workflow-history/hooks/use-workflow-history-fetcher.ts diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx new file mode 100644 index 000000000..9c5a83976 --- /dev/null +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx @@ -0,0 +1,196 @@ +import { QueryClient } from '@tanstack/react-query'; + +import { act, renderHook, waitFor } from '@/test-utils/rtl'; + +import workflowHistoryMultiPageFixture from '../../__fixtures__/workflow-history-multi-page-fixture'; +import { workflowPageUrlParams } from '../../__fixtures__/workflow-page-url-params'; +import WorkflowHistoryFetcher from '../../helpers/workflow-history-fetcher'; +import useWorkflowHistoryFetcher from '../use-workflow-history-fetcher'; + +jest.mock('../../helpers/workflow-history-fetcher'); + +const mockParams = { + ...workflowPageUrlParams, + pageSize: 50, + waitForNewEvent: true, +}; +let mockFetcherInstance: jest.Mocked; +let mockOnChangeCallback: jest.Mock; +let mockUnsubscribe: jest.Mock; + +function setup() { + const hookResult = renderHook(() => useWorkflowHistoryFetcher(mockParams)); + + return { + ...hookResult, + mockFetcherInstance, + mockOnChangeCallback, + mockUnsubscribe, + }; +} + 
+describe(useWorkflowHistoryFetcher.name, () => { + beforeEach(() => { + jest.clearAllMocks(); + + mockOnChangeCallback = jest.fn(); + mockUnsubscribe = jest.fn(); + + mockFetcherInstance = { + start: jest.fn(), + stop: jest.fn(), + unmount: jest.fn(), + fetchSingleNextPage: jest.fn(), + onChange: jest.fn((callback) => { + mockOnChangeCallback.mockImplementation(callback); + return mockUnsubscribe; + }), + getCurrentState: jest.fn(() => ({ + data: undefined, + error: null, + isError: false, + isLoading: false, + isPending: true, + isFetchingNextPage: false, + hasNextPage: false, + status: 'pending' as const, + })), + } as unknown as jest.Mocked; + + ( + WorkflowHistoryFetcher as jest.MockedClass + ).mockImplementation(() => mockFetcherInstance); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should create a WorkflowHistoryFetcher instance with correct params', () => { + setup(); + + expect(WorkflowHistoryFetcher).toHaveBeenCalledWith( + expect.any(QueryClient), + mockParams + ); + expect(WorkflowHistoryFetcher).toHaveBeenCalledTimes(1); + }); + + it('should reuse the same fetcher instance on re-renders', () => { + const { rerender } = setup(); + + rerender(); + rerender(); + + expect(WorkflowHistoryFetcher).toHaveBeenCalledTimes(1); + }); + + it('should subscribe to fetcher state changes on mount', () => { + setup(); + + expect(mockFetcherInstance.onChange).toHaveBeenCalledTimes(1); + }); + + it('should start fetcher to load first page on mount', () => { + setup(); + + expect(mockFetcherInstance.start).toHaveBeenCalledWith( + expect.any(Function) + ); + expect(mockFetcherInstance.start).toHaveBeenCalledTimes(1); + }); + + it('should return initial history query state', () => { + const { result } = setup(); + + expect(result.current.historyQuery).toBeDefined(); + expect(result.current.historyQuery.isPending).toBe(true); + }); + + it('should update historyQuery when fetcher state changes', async () => { + const { result, mockOnChangeCallback } = 
setup(); + + const newState = { + data: { + pages: [workflowHistoryMultiPageFixture[0]], + pageParams: [], + }, + error: null, + isError: false, + isLoading: false, + isPending: false, + isFetchingNextPage: false, + hasNextPage: true, + status: 'success' as const, + }; + + act(() => { + mockOnChangeCallback(newState); + }); + + await waitFor(() => { + expect(result.current.historyQuery.status).toBe('success'); + }); + }); + + it('should call fetcher.start() with custom shouldContinue callback passed to startLoadingHistory', () => { + const { result, mockFetcherInstance } = setup(); + const customShouldContinue = jest.fn(() => false); + + act(() => { + result.current.startLoadingHistory(customShouldContinue); + }); + + expect(mockFetcherInstance.start).toHaveBeenCalledWith( + customShouldContinue + ); + }); + + it('should call fetcher.stop() within stopLoadingHistory', () => { + const { result, mockFetcherInstance } = setup(); + + act(() => { + result.current.stopLoadingHistory(); + }); + + expect(mockFetcherInstance.stop).toHaveBeenCalledTimes(1); + }); + + it('should call fetcher.fetchSingleNextPage() within fetchSingleNextPage', () => { + const { result, mockFetcherInstance } = setup(); + + act(() => { + result.current.fetchSingleNextPage(); + }); + + expect(mockFetcherInstance.fetchSingleNextPage).toHaveBeenCalledTimes(1); + }); + + it('should unsubscribe from onChange when unmounted', () => { + const { unmount, mockUnsubscribe } = setup(); + + unmount(); + + expect(mockUnsubscribe).toHaveBeenCalledTimes(1); + }); + + it('should call fetcher.unmount() when component unmounts', () => { + const { unmount, mockFetcherInstance } = setup(); + + unmount(); + + expect(mockFetcherInstance.unmount).toHaveBeenCalledTimes(1); + }); + + it('should return all expected methods and state', () => { + const { result } = setup(); + + expect(result.current).toHaveProperty('historyQuery'); + expect(result.current).toHaveProperty('startLoadingHistory'); + 
expect(result.current).toHaveProperty('stopLoadingHistory'); + expect(result.current).toHaveProperty('fetchSingleNextPage'); + expect(typeof result.current.startLoadingHistory).toBe('function'); + expect(typeof result.current.stopLoadingHistory).toBe('function'); + expect(typeof result.current.fetchSingleNextPage).toBe('function'); + }); +}); diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts new file mode 100644 index 000000000..e23d7c4e6 --- /dev/null +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -0,0 +1,87 @@ +import { useCallback, useEffect, useRef } from 'react'; + +import { + type InfiniteData, + type InfiniteQueryObserverResult, + useQueryClient, +} from '@tanstack/react-query'; + +import useThrottledState from '@/hooks/use-throttled-state'; +import { + type WorkflowHistoryQueryParams, + type GetWorkflowHistoryResponse, + type RouteParams, +} from '@/route-handlers/get-workflow-history/get-workflow-history.types'; +import { type RequestError } from '@/utils/request/request-error'; + +import WorkflowHistoryFetcher from '../helpers/workflow-history-fetcher'; +import { type ShouldContinueCallback } from '../helpers/workflow-history-fetcher.types'; + +export default function useWorkflowHistoryFetcher( + params: WorkflowHistoryQueryParams & RouteParams +) { + const queryClient = useQueryClient(); + const fetcherRef = useRef(null); + + if (!fetcherRef.current) { + fetcherRef.current = new WorkflowHistoryFetcher(queryClient, params); + } + + const [historyQuery, setHistoryQuery] = useThrottledState< + InfiniteQueryObserverResult< + InfiniteData, + RequestError + > + >(fetcherRef.current.getCurrentState(), 2000, { + leading: true, + trailing: true, + }); + + useEffect(() => { + if (!fetcherRef.current) return; + + const unsubscribe = fetcherRef.current.onChange((state) => { + const pagesCount = state.data?.pages?.length || 0; + // immediately set 
if there is the first page without throttling other wise throttle + setHistoryQuery(() => state, pagesCount <= 1); + }); + + // Fetch first page + fetcherRef.current.start((state) => !state?.data?.pages?.length); + + return () => { + unsubscribe(); + }; + }, [setHistoryQuery]); + + useEffect(() => { + return () => { + fetcherRef.current?.unmount(); + }; + }, []); + + const startLoadingHistory = useCallback( + (shouldContinue: ShouldContinueCallback = () => true) => { + if (!fetcherRef.current) return; + fetcherRef.current.start(shouldContinue); + }, + [] + ); + + const stopLoadingHistory = useCallback(() => { + if (!fetcherRef.current) return; + fetcherRef.current.stop(); + }, []); + + const fetchSingleNextPage = useCallback(() => { + if (!fetcherRef.current) return; + fetcherRef.current.fetchSingleNextPage(); + }, []); + + return { + historyQuery, + startLoadingHistory, + stopLoadingHistory, + fetchSingleNextPage, + }; +} From ae114ecc3e754a8aa148c289ccdd956fb46e863c Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 13:09:29 +0100 Subject: [PATCH 04/58] add configurable throttleMs to the hook Signed-off-by: Assem Hafez --- .../workflow-history/hooks/use-workflow-history-fetcher.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index e23d7c4e6..cfb375f13 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -18,7 +18,8 @@ import WorkflowHistoryFetcher from '../helpers/workflow-history-fetcher'; import { type ShouldContinueCallback } from '../helpers/workflow-history-fetcher.types'; export default function useWorkflowHistoryFetcher( - params: WorkflowHistoryQueryParams & RouteParams + params: WorkflowHistoryQueryParams & RouteParams, + throttleMs: number = 2000 ) { const queryClient = useQueryClient(); 
const fetcherRef = useRef(null); @@ -32,7 +33,7 @@ export default function useWorkflowHistoryFetcher( InfiniteData, RequestError > - >(fetcherRef.current.getCurrentState(), 2000, { + >(fetcherRef.current.getCurrentState(), throttleMs, { leading: true, trailing: true, }); From 8611eed9bba39966de687bb5831ef922f5654451 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 13:16:44 +0100 Subject: [PATCH 05/58] use fetcher in workflow history --- .../__tests__/workflow-history.test.tsx | 19 ++- .../workflow-history-header.types.ts | 6 +- .../workflow-history/workflow-history.tsx | 134 ++++++++++-------- 3 files changed, 86 insertions(+), 73 deletions(-) diff --git a/src/views/workflow-history/__tests__/workflow-history.test.tsx b/src/views/workflow-history/__tests__/workflow-history.test.tsx index 99cdb5f67..797b1fde6 100644 --- a/src/views/workflow-history/__tests__/workflow-history.test.tsx +++ b/src/views/workflow-history/__tests__/workflow-history.test.tsx @@ -28,6 +28,15 @@ jest.mock('@/hooks/use-page-query-params/use-page-query-params', () => jest.fn(() => [{ historySelectedEventId: '1' }, jest.fn()]) ); +// Mock the hook to use minimal throttle delay for faster tests +jest.mock('../hooks/use-workflow-history-fetcher', () => { + const actual = jest.requireActual('../hooks/use-workflow-history-fetcher'); + return { + __esModule: true, + default: jest.fn((params) => actual.default(params, 0)), // 0ms throttle for tests + }; +}); + jest.mock( '../workflow-history-compact-event-card/workflow-history-compact-event-card', () => jest.fn(() =>
Compact group Card
) @@ -90,24 +99,24 @@ describe('WorkflowHistory', () => { }); it('renders page header correctly', async () => { - setup({}); + await setup({}); expect( await screen.findByText('Workflow history Header') ).toBeInTheDocument(); }); it('renders compact group cards', async () => { - setup({}); + await setup({}); expect(await screen.findByText('Compact group Card')).toBeInTheDocument(); }); it('renders timeline group cards', async () => { - setup({}); + await setup({}); expect(await screen.findByText('Timeline group card')).toBeInTheDocument(); }); it('renders load more section', async () => { - setup({}); + await setup({}); expect(await screen.findByText('Load more')).toBeInTheDocument(); }); @@ -180,7 +189,7 @@ describe('WorkflowHistory', () => { }); it('should show no results when filtered events are empty', async () => { - setup({ emptyEvents: true }); + await setup({ emptyEvents: true }); expect(await screen.findByText('No Results')).toBeInTheDocument(); }); diff --git a/src/views/workflow-history/workflow-history-header/workflow-history-header.types.ts b/src/views/workflow-history/workflow-history-header/workflow-history-header.types.ts index c68f700e1..0658fc643 100644 --- a/src/views/workflow-history/workflow-history-header/workflow-history-header.types.ts +++ b/src/views/workflow-history/workflow-history-header/workflow-history-header.types.ts @@ -8,10 +8,6 @@ import { type Props as WorkflowHistoryExportJsonButtonProps } from '../workflow- import { type Props as WorkflowHistoryTimelineChartProps } from '../workflow-history-timeline-chart/workflow-history-timeline-chart.types'; type WorkflowPageQueryParamsConfig = typeof workflowPageQueryParamsConfig; -type WorkflowHistoryRequestArgs = WorkflowHistoryExportJsonButtonProps & { - pageSize: number; - waitForNewEvent: string; -}; type PageFiltersProps = { resetAllFilters: () => void; @@ -25,7 +21,7 @@ export type Props = { toggleIsExpandAllEvents: () => void; isUngroupedHistoryViewEnabled: boolean; 
onClickGroupModeToggle: () => void; - wfHistoryRequestArgs: WorkflowHistoryRequestArgs; + wfHistoryRequestArgs: WorkflowHistoryExportJsonButtonProps; pageFiltersProps: PageFiltersProps; timelineChartProps: WorkflowHistoryTimelineChartProps; }; diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index b6a72fe60..301c227bd 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -2,16 +2,12 @@ import React, { useCallback, useContext, + useEffect, useMemo, useRef, useState, } from 'react'; -import { - useSuspenseInfiniteQuery, - type InfiniteData, -} from '@tanstack/react-query'; -import queryString from 'query-string'; import { Virtuoso, type VirtuosoHandle } from 'react-virtuoso'; import usePageFilters from '@/components/page-filters/hooks/use-page-filters'; @@ -19,11 +15,8 @@ import PageSection from '@/components/page-section/page-section'; import SectionLoadingIndicator from '@/components/section-loading-indicator/section-loading-indicator'; import useStyletronClasses from '@/hooks/use-styletron-classes'; import useThrottledState from '@/hooks/use-throttled-state'; -import { type GetWorkflowHistoryResponse } from '@/route-handlers/get-workflow-history/get-workflow-history.types'; import parseGrpcTimestamp from '@/utils/datetime/parse-grpc-timestamp'; import decodeUrlParams from '@/utils/decode-url-params'; -import request from '@/utils/request'; -import { type RequestError } from '@/utils/request/request-error'; import sortBy from '@/utils/sort-by'; import { resetWorkflowActionConfig } from '../workflow-actions/config/workflow-actions.config'; @@ -41,7 +34,7 @@ import pendingActivitiesInfoToEvents from './helpers/pending-activities-info-to- import pendingDecisionInfoToEvent from './helpers/pending-decision-info-to-event'; import useEventExpansionToggle from './hooks/use-event-expansion-toggle'; import useInitialSelectedEvent from 
'./hooks/use-initial-selected-event'; -import useKeepLoadingEvents from './hooks/use-keep-loading-events'; +import useWorkflowHistoryFetcher from './hooks/use-workflow-history-fetcher'; import WorkflowHistoryCompactEventCard from './workflow-history-compact-event-card/workflow-history-compact-event-card'; import { WorkflowHistoryContext } from './workflow-history-context-provider/workflow-history-context-provider'; import WorkflowHistoryHeader from './workflow-history-header/workflow-history-header'; @@ -63,8 +56,26 @@ export default function WorkflowHistory({ params }: Props) { const wfHistoryRequestArgs = { ...historyQueryParams, pageSize: WORKFLOW_HISTORY_PAGE_SIZE_CONFIG, - waitForNewEvent: 'true', + waitForNewEvent: true, }; + + const { + historyQuery, + startLoadingHistory, + stopLoadingHistory, + fetchSingleNextPage, + } = useWorkflowHistoryFetcher( + { + domain: wfHistoryRequestArgs.domain, + cluster: wfHistoryRequestArgs.cluster, + workflowId: wfHistoryRequestArgs.workflowId, + runId: wfHistoryRequestArgs.runId, + pageSize: wfHistoryRequestArgs.pageSize, + waitForNewEvent: wfHistoryRequestArgs.waitForNewEvent, + }, + 2000 + ); + const [resetToDecisionEventId, setResetToDecisionEventId] = useState< string | undefined >(undefined); @@ -96,38 +107,16 @@ export default function WorkflowHistory({ params }: Props) { const { data: result, hasNextPage, - fetchNextPage, isFetchingNextPage, + isLoading, + isPending, error, isFetchNextPageError, - } = useSuspenseInfiniteQuery< - GetWorkflowHistoryResponse, - RequestError, - InfiniteData, - [string, typeof wfHistoryRequestArgs], - string | undefined - >({ - queryKey: ['workflow_history_paginated', wfHistoryRequestArgs] as const, - queryFn: ({ queryKey: [_, qp], pageParam }) => - request( - `/api/domains/${qp.domain}/${qp.cluster}/workflows/${qp.workflowId}/${qp.runId}/history?${queryString.stringify( - { - nextPage: pageParam, - pageSize: qp.pageSize, - waitForNewEvent: qp.waitForNewEvent, - } - )}` - ).then((res) => 
res.json()), - initialPageParam: undefined, - getNextPageParam: (lastPage) => { - if (!lastPage?.nextPageToken) return undefined; - return lastPage?.nextPageToken; - }, - }); + } = historyQuery; const events = useMemo( () => - (result.pages || []) + (result?.pages || []) .flat(1) .map(({ history }) => history?.events || []) .flat(1), @@ -194,15 +183,21 @@ export default function WorkflowHistory({ params }: Props) { ); const [visibleGroupsRange, setTimelineListVisibleRange] = - useThrottledState({ - startIndex: -1, - endIndex: -1, - compactStartIndex: -1, - compactEndIndex: -1, - ungroupedStartIndex: -1, - ungroupedEndIndex: -1, - }); - + useThrottledState( + { + startIndex: -1, + endIndex: -1, + compactStartIndex: -1, + compactEndIndex: -1, + ungroupedStartIndex: -1, + ungroupedEndIndex: -1, + }, + 700, + { + leading: false, + trailing: true, + } + ); const onClickGroupModeToggle = useCallback(() => { setUngroupedViewUserPreference(!isUngroupedHistoryViewEnabled); @@ -243,7 +238,7 @@ export default function WorkflowHistory({ params }: Props) { }); const isLastPageEmpty = - result.pages[result.pages.length - 1].history?.events.length === 0; + result?.pages?.[result?.pages?.length - 1]?.history?.events.length === 0; const visibleGroupsHasMissingEvents = useMemo(() => { return getVisibleGroupsHasMissingEvents( @@ -277,19 +272,31 @@ export default function WorkflowHistory({ params }: Props) { ungroupedViewShouldLoadMoreEvents, ]); - const { isLoadingMore, reachedAvailableHistoryEnd } = useKeepLoadingEvents({ - shouldKeepLoading: keepLoadingMoreEvents, - stopAfterEndReached: true, - continueLoadingAfterError: true, - hasNextPage, - fetchNextPage, - isFetchingNextPage, - isLastPageEmpty, - isFetchNextPageError, - }); + const manualFetchNextPage = useCallback(() => { + if (keepLoadingMoreEvents) { + startLoadingHistory(); + } else { + fetchSingleNextPage(); + } + }, [keepLoadingMoreEvents, startLoadingHistory, fetchSingleNextPage]); + + useEffect(() => { + if 
(keepLoadingMoreEvents) { + startLoadingHistory(); + } else { + stopLoadingHistory(); + } + }, [keepLoadingMoreEvents, startLoadingHistory, stopLoadingHistory]); + + const reachedEndOfAvailableHistory = + (!hasNextPage && !isPending) || + (hasNextPage && isLastPageEmpty && !isFetchNextPageError); const contentIsLoading = - shouldSearchForInitialEvent && !initialEventFound && isLoadingMore; + isLoading || + (shouldSearchForInitialEvent && + !initialEventFound && + !reachedEndOfAvailableHistory); const { isExpandAllEvents, @@ -339,7 +346,7 @@ export default function WorkflowHistory({ params }: Props) { : hasNextPage, hasMoreEvents: hasNextPage, isFetchingMoreEvents: isFetchingNextPage, - fetchMoreEvents: fetchNextPage, + fetchMoreEvents: manualFetchNextPage, onClickEventGroup: (eventGroupIndex) => { const eventId = filteredEventGroupsEntries[eventGroupIndex][1].events[0] @@ -389,7 +396,7 @@ export default function WorkflowHistory({ params }: Props) { error={error} hasMoreEvents={hasNextPage} isFetchingMoreEvents={isFetchingNextPage} - fetchMoreEvents={fetchNextPage} + fetchMoreEvents={manualFetchNextPage} getIsEventExpanded={getIsEventExpanded} toggleIsEventExpanded={toggleIsEventExpanded} onVisibleRangeChange={({ startIndex, endIndex }) => @@ -428,7 +435,7 @@ export default function WorkflowHistory({ params }: Props) { {...group} statusReady={ !group.hasMissingEvents || - reachedAvailableHistoryEnd + reachedEndOfAvailableHistory } workflowCloseStatus={ workflowExecutionInfo?.closeStatus @@ -458,7 +465,7 @@ export default function WorkflowHistory({ params }: Props) { )} endReached={() => { - if (!isFetchingNextPage && hasNextPage) fetchNextPage(); + manualFetchNextPage(); }} /> @@ -489,7 +496,8 @@ export default function WorkflowHistory({ params }: Props) { key={groupId} {...group} showLoadingMoreEvents={ - group.hasMissingEvents && !reachedAvailableHistoryEnd + group.hasMissingEvents && + !reachedEndOfAvailableHistory } 
resetToDecisionEventId={group.resetToDecisionEventId} isLastEvent={ @@ -520,7 +528,7 @@ export default function WorkflowHistory({ params }: Props) { Footer: () => ( From 819effb27f56a3397a2e17c35bb783d8abd39ffe Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 13:46:47 +0100 Subject: [PATCH 06/58] remove useKeepLoadingEvents Signed-off-by: Assem Hafez --- .../__tests__/use-keep-loading-events.test.ts | 132 ------------------ .../hooks/use-keep-loading-events.ts | 47 ------- 2 files changed, 179 deletions(-) delete mode 100644 src/views/workflow-history/hooks/__tests__/use-keep-loading-events.test.ts delete mode 100644 src/views/workflow-history/hooks/use-keep-loading-events.ts diff --git a/src/views/workflow-history/hooks/__tests__/use-keep-loading-events.test.ts b/src/views/workflow-history/hooks/__tests__/use-keep-loading-events.test.ts deleted file mode 100644 index 4431ba5c7..000000000 --- a/src/views/workflow-history/hooks/__tests__/use-keep-loading-events.test.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { renderHook } from '@/test-utils/rtl'; - -import useKeepLoadingEvents from '../use-keep-loading-events'; -import { type UseKeepLoadingEventsParams } from '../use-keep-loading-events.types'; - -describe('useKeepLoadingEvents', () => { - afterEach(() => { - jest.clearAllMocks(); - }); - - it('should set reachedAvailableHistoryEnd to true when there are no more pages', () => { - const { result } = setup({ hasNextPage: false }); - expect(result.current.reachedAvailableHistoryEnd).toBe(true); - }); - - it('should call fetchNextPage when shouldKeepLoading is true and there are more pages', () => { - const { fetchNextPageMock } = setup({ shouldKeepLoading: true }); - - expect(fetchNextPageMock).toHaveBeenCalled(); - }); - - it('should not call fetchNextPage when shouldKeepLoading is false', () => { - const { fetchNextPageMock } = setup({ shouldKeepLoading: false }); - - expect(fetchNextPageMock).not.toHaveBeenCalled(); - }); - - it('should not call 
fetchNextPage when isFetchingNextPage is true', () => { - const { fetchNextPageMock } = setup({ isFetchingNextPage: true }); - - expect(fetchNextPageMock).not.toHaveBeenCalled(); - }); - - it('should not call fetchNextPage when stopAfterEndReached is true and reachedAvailableHistoryEnd is true', () => { - const { fetchNextPageMock } = setup({ - hasNextPage: false, - stopAfterEndReached: true, - }); - - expect(fetchNextPageMock).not.toHaveBeenCalled(); - }); - - it('should not call fetchNextPage after error when continueLoadingAfterError is false', () => { - const { fetchNextPageMock, rerender } = setup({ - isFetchNextPageError: true, - continueLoadingAfterError: false, - }); - - rerender({ isFetchNextPageError: false }); - - expect(fetchNextPageMock).not.toHaveBeenCalled(); - }); - - it('should call fetchNextPage after error when continueLoadingAfterError is true', () => { - const { fetchNextPageMock, rerender } = setup({ - isFetchNextPageError: true, - continueLoadingAfterError: true, - }); - - rerender({ isFetchNextPageError: false }); - - expect(fetchNextPageMock).toHaveBeenCalled(); - }); - - it('should set stoppedDueToError to true when isFetchNextPageError is true', () => { - const { result, rerender } = setup({ - isFetchNextPageError: false, - }); - - expect(result.current.stoppedDueToError).toBe(false); - - rerender({ isFetchNextPageError: true }); - - expect(result.current.stoppedDueToError).toBe(true); - }); - - it('should not call fetchNextPage when stoppedDueToError is true', () => { - const { fetchNextPageMock } = setup({ isFetchNextPageError: true }); - - expect(fetchNextPageMock).not.toHaveBeenCalled(); - }); - - it('should return isLoadingMore as true when keepLoadingMore conditions are met', () => { - const { result, rerender } = setup({ - shouldKeepLoading: true, - stopAfterEndReached: true, - hasNextPage: true, - isFetchNextPageError: false, - }); - - expect(result.current.isLoadingMore).toBe(true); - - rerender({ - shouldKeepLoading: true, - 
hasNextPage: true, - isFetchNextPageError: false, - // stopAfterEndReached and simulate end by empty events page - stopAfterEndReached: true, - isLastPageEmpty: true, - }); - expect(result.current.isLoadingMore).toBe(false); - - rerender({ - shouldKeepLoading: true, - stopAfterEndReached: true, - hasNextPage: true, - // adding error - isFetchNextPageError: true, - }); - expect(result.current.isLoadingMore).toBe(false); - }); -}); - -function setup(params: Partial) { - const fetchNextPage = jest.fn(); - const { result, rerender } = renderHook( - (runTimeChanges?: Partial) => - useKeepLoadingEvents({ - shouldKeepLoading: true, - stopAfterEndReached: true, - isLastPageEmpty: false, - hasNextPage: true, - fetchNextPage, - isFetchingNextPage: false, - isFetchNextPageError: false, - ...params, - ...runTimeChanges, - }) - ); - - return { result, rerender, fetchNextPageMock: fetchNextPage }; -} diff --git a/src/views/workflow-history/hooks/use-keep-loading-events.ts b/src/views/workflow-history/hooks/use-keep-loading-events.ts deleted file mode 100644 index 8ce917239..000000000 --- a/src/views/workflow-history/hooks/use-keep-loading-events.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { useEffect, useRef } from 'react'; - -import { type UseKeepLoadingEventsParams } from './use-keep-loading-events.types'; - -export default function useKeepLoadingEvents({ - shouldKeepLoading, - isLastPageEmpty, - hasNextPage, - fetchNextPage, - isFetchingNextPage, - stopAfterEndReached, - isFetchNextPageError, - continueLoadingAfterError, -}: UseKeepLoadingEventsParams) { - const reachedAvailableHistoryEnd = useRef(false); - - const hadErrorOnce = useRef(isFetchNextPageError); - // update reachedAvailableHistoryEnd - const reached = - !hasNextPage || (hasNextPage && isLastPageEmpty && !isFetchNextPageError); - if (reached && !reachedAvailableHistoryEnd.current) - reachedAvailableHistoryEnd.current = true; - - // update hadErrorOnce - if (isFetchNextPageError && !hadErrorOnce.current) - 
hadErrorOnce.current = true; - - const stopDueToError = - isFetchNextPageError || - (hadErrorOnce.current && !continueLoadingAfterError); - - const canLoadMore = - shouldKeepLoading && - !(stopAfterEndReached && reachedAvailableHistoryEnd.current) && - !stopDueToError && - hasNextPage; - - useEffect(() => { - if (canLoadMore && !isFetchingNextPage) fetchNextPage(); - }, [isFetchingNextPage, fetchNextPage, canLoadMore]); - - return { - reachedAvailableHistoryEnd: reachedAvailableHistoryEnd.current, - stoppedDueToError: stopDueToError, - isLoadingMore: canLoadMore, - }; -} From 459acbe2d575b9e2400123e5c99f10ddbdd0249a Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Wed, 5 Nov 2025 09:57:21 +0100 Subject: [PATCH 07/58] grouping utility Signed-off-by: Assem Hafez --- .../workflow-history-grouper.test.tsx | 718 ++++++++++++++++++ .../helpers/workflow-history-grouper.ts | 522 +++++++++++++ .../helpers/workflow-history-grouper.types.ts | 57 ++ 3 files changed, 1297 insertions(+) create mode 100644 src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx create mode 100644 src/views/workflow-history/helpers/workflow-history-grouper.ts create mode 100644 src/views/workflow-history/helpers/workflow-history-grouper.types.ts diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx new file mode 100644 index 000000000..706a7e99f --- /dev/null +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx @@ -0,0 +1,718 @@ +import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; + +import { + completedActivityTaskEvents, + scheduleActivityTaskEvent, + startActivityTaskEvent, +} from '../../__fixtures__/workflow-history-activity-events'; +import { + completedDecisionTaskEvents, + scheduleDecisionTaskEvent, + startDecisionTaskEvent, +} from 
'../../__fixtures__/workflow-history-decision-events'; +import { + pendingActivityTaskStartEvent, + pendingDecisionTaskStartEvent, +} from '../../__fixtures__/workflow-history-pending-events'; +import type { + ActivityHistoryGroup, + PendingActivityTaskStartEvent, + PendingDecisionTaskStartEvent, +} from '../../workflow-history.types'; +import WorkflowHistoryGrouper from '../workflow-history-grouper'; +import type { Props } from '../workflow-history-grouper.types'; + +// Create pending decision that matches the scheduleDecisionTaskEvent (eventId: '2') +const pendingDecisionForScheduledEvent = { + ...pendingDecisionTaskStartEvent, + computedEventId: 'pending-2', + pendingDecisionTaskStartEventAttributes: { + ...pendingDecisionTaskStartEvent.pendingDecisionTaskStartEventAttributes, + scheduleId: '2', + }, +} as const satisfies PendingDecisionTaskStartEvent; + +// Helper to create a grouper with a mock onChange +function createGrouper(options: Partial = {}) { + const onChange = jest.fn(); + const grouper = new WorkflowHistoryGrouper({ + onChange, + ...options, + }); + return { grouper, onChange }; +} + +// Helper to wait for processing to complete +function waitForProcessing(onChange: jest.Mock, timeout = 5000): Promise { + return new Promise((resolve, reject) => { + const startTime = Date.now(); + + const checkComplete = () => { + // Check if onChange was called with status 'idle' or 'error' + const lastCall = onChange.mock.calls[onChange.mock.calls.length - 1]; + if ( + lastCall && + (lastCall[0].status === 'idle' || lastCall[0].status === 'error') + ) { + resolve(); + return; + } + + // Check timeout + if (Date.now() - startTime > timeout) { + reject(new Error('Timeout waiting for processing to complete')); + return; + } + + // Check again soon + setTimeout(checkComplete, 10); + }; + + // Start checking after a short delay to let updateEvents kick off + setTimeout(checkComplete, 10); + }); +} + +describe(WorkflowHistoryGrouper.name, () => { + describe('basic event 
processing', () => { + it('should process events and create groups', async () => { + const { grouper, onChange } = createGrouper(); + + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + const groups = grouper.getGroups(); + expect(groups).toBeDefined(); + expect(groups['7']).toBeDefined(); + expect(groups['7'].groupType).toBe('Activity'); + expect(grouper.getLastProcessedEventIndex()).toBe(2); + }); + + it('should not reprocess events on subsequent calls with same events', async () => { + const { grouper, onChange } = createGrouper(); + + // First call + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + const initialGroups = grouper.getGroups(); + const initialIndex = grouper.getLastProcessedEventIndex(); + + // Second call with same events - should not trigger processing + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + // Give it a moment, but don't wait for onChange since nothing should happen + await new Promise((resolve) => setTimeout(resolve, 50)); + + expect(grouper.getGroups()).toEqual(initialGroups); + expect(grouper.getLastProcessedEventIndex()).toBe(initialIndex); + }); + + it('should process only new events on subsequent calls', async () => { + const { grouper, onChange } = createGrouper(); + + // First call with partial events + grouper.updateEvents([ + scheduleActivityTaskEvent, + startActivityTaskEvent, + ] as HistoryEvent[]); + await waitForProcessing(onChange); + + expect(grouper.getLastProcessedEventIndex()).toBe(1); + + // Second call with all events + onChange.mockClear(); + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + const groups = grouper.getGroups(); + expect(grouper.getLastProcessedEventIndex()).toBe(2); + expect(groups['7']).toBeDefined(); + expect((groups['7'] as ActivityHistoryGroup).events).toHaveLength(3); + }); + }); + + 
describe('pending activities management', () => { + it('should add new pending activities to groups', async () => { + const { grouper, onChange } = createGrouper(); + + // First call with scheduled event only + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Update with pending activity + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + const groups = grouper.getGroups(); + const activityGroup = groups['7'] as ActivityHistoryGroup; + expect(activityGroup.events).toHaveLength(2); + expect(activityGroup.events[1].attributes).toBe( + 'pendingActivityTaskStartEventAttributes' + ); + }); + + it('should remove stale pending activities from groups', async () => { + const { grouper, onChange } = createGrouper(); + + // First call with pending activity + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + const firstGroups = grouper.getGroups(); + const firstActivityGroup = firstGroups['7'] as ActivityHistoryGroup; + expect(firstActivityGroup.events).toHaveLength(2); + + // Second call without pending activity (it completed) + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: null, + }); + + const groups = grouper.getGroups(); + const activityGroup = groups['7'] as ActivityHistoryGroup; + expect(activityGroup.events).toHaveLength(1); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + }); + + it('should handle multiple pending activity state transitions', async () => { + const { grouper, onChange } = createGrouper(); + + // Initial state + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // 
Add pending activity + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Remove pending activity (it started) + onChange.mockClear(); + grouper.updateEvents([ + scheduleActivityTaskEvent, + startActivityTaskEvent, + ] as HistoryEvent[]); + await waitForProcessing(onChange); + + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: null, + }); + + const activityGroup = grouper.getGroups()['7'] as ActivityHistoryGroup; + expect(activityGroup.events).toHaveLength(2); + expect( + activityGroup.events.some( + (e) => e.attributes === 'pendingActivityTaskStartEventAttributes' + ) + ).toBe(false); + }); + }); + + describe('pending decision management', () => { + it('should add new pending decision to groups', async () => { + const { grouper, onChange } = createGrouper(); + + // First call with scheduled event only + grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Add pending decision + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecisionForScheduledEvent, + }); + + const decisionGroup = grouper.getGroups()['2']; + expect(decisionGroup.groupType).toBe('Decision'); + expect(decisionGroup.events).toHaveLength(2); + }); + + it('should remove stale pending decision from groups', async () => { + const { grouper, onChange } = createGrouper(); + + // First call with pending decision + grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecisionForScheduledEvent, + }); + + const firstGroups = grouper.getGroups(); + expect(firstGroups['2'].events).toHaveLength(2); + + // Second call without pending decision (it completed) + await grouper.updatePendingEvents({ + pendingStartActivities: [], + 
pendingStartDecision: null, + }); + + const decisionGroup = grouper.getGroups()['2']; + expect(decisionGroup.events).toHaveLength(1); + }); + }); + + describe('state management', () => { + it('should track last processed event index correctly', () => { + const { grouper } = createGrouper(); + + expect(grouper.getLastProcessedEventIndex()).toBe(-1); + }); + + it('should return current groups without processing', () => { + const { grouper } = createGrouper(); + + const groups = grouper.getGroups(); + + expect(groups).toEqual({}); + }); + + it('should reset grouper state', async () => { + const { grouper, onChange } = createGrouper(); + + // Process some events + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + expect(grouper.getLastProcessedEventIndex()).toBe(2); + expect(Object.keys(grouper.getGroups()).length).toBeGreaterThan(0); + + // Reset + grouper.reset(); + + expect(grouper.getLastProcessedEventIndex()).toBe(-1); + expect(grouper.getGroups()).toEqual({}); + }); + + it('should reprocess events after reset', async () => { + const { grouper, onChange } = createGrouper(); + + // Process events + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + const firstGroups = grouper.getGroups(); + + // Reset and reprocess + grouper.reset(); + onChange.mockClear(); + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + expect(grouper.getGroups()).toEqual(firstGroups); + }); + }); + + describe('pending event buffering', () => { + it('should buffer pending activity when group does not exist yet', async () => { + const { grouper, onChange } = createGrouper(); + + // Add pending activity BEFORE scheduled event exists + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Group should NOT exist yet (pending event is 
buffered) + let groups = grouper.getGroups(); + expect(groups['7']).toBeUndefined(); + + // Now add the scheduled event + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Group should now exist with both scheduled and pending events + groups = grouper.getGroups(); + const activityGroup = groups['7'] as ActivityHistoryGroup; + expect(activityGroup).toBeDefined(); + expect(activityGroup.events).toHaveLength(2); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + expect(activityGroup.events[1].attributes).toBe( + 'pendingActivityTaskStartEventAttributes' + ); + }); + + it('should buffer pending decision when group does not exist yet', async () => { + const { grouper, onChange } = createGrouper(); + + // Add pending decision BEFORE scheduled event exists + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecisionForScheduledEvent, + }); + + // Group should NOT exist yet (pending event is buffered) + let groups = grouper.getGroups(); + expect(groups['2']).toBeUndefined(); + + // Now add the scheduled event + grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Group should now exist with both scheduled and pending events + groups = grouper.getGroups(); + const decisionGroup = groups['2']; + expect(decisionGroup).toBeDefined(); + expect(decisionGroup.groupType).toBe('Decision'); + expect(decisionGroup.events).toHaveLength(2); + }); + + it('should handle multiple buffered pending activities', async () => { + const { grouper, onChange } = createGrouper(); + + const pendingActivity1 = { + ...pendingActivityTaskStartEvent, + computedEventId: 'pending-7', + pendingActivityTaskStartEventAttributes: { + ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, + scheduleId: '7', + }, + } as const satisfies PendingActivityTaskStartEvent; + + const 
pendingActivity2 = { + ...pendingActivityTaskStartEvent, + computedEventId: 'pending-10', + pendingActivityTaskStartEventAttributes: { + ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, + scheduleId: '10', + activityId: '1', + }, + } as const satisfies PendingActivityTaskStartEvent; + + const scheduleEvent2 = { + ...scheduleActivityTaskEvent, + eventId: '10', + activityTaskScheduledEventAttributes: { + ...scheduleActivityTaskEvent.activityTaskScheduledEventAttributes, + activityId: '1', + }, + }; + + // Add multiple pending activities BEFORE their scheduled events + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivity1, pendingActivity2], + pendingStartDecision: null, + }); + + // No groups should exist yet + expect(Object.keys(grouper.getGroups()).length).toBe(0); + + // Add first scheduled event + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // First group should now exist + let groups = grouper.getGroups(); + expect(groups['7']).toBeDefined(); + expect(groups['10']).toBeUndefined(); + + // Add second scheduled event + onChange.mockClear(); + grouper.updateEvents([ + scheduleActivityTaskEvent, + scheduleEvent2, + ] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Both groups should now exist + groups = grouper.getGroups(); + expect(groups['7']).toBeDefined(); + expect(groups['10']).toBeDefined(); + expect((groups['7'] as ActivityHistoryGroup).events).toHaveLength(2); + expect((groups['10'] as ActivityHistoryGroup).events).toHaveLength(2); + }); + + it('should clear buffer when pending events are updated', async () => { + const { grouper } = createGrouper(); + + const pendingActivity1 = { + ...pendingActivityTaskStartEvent, + computedEventId: 'pending-7', + } as const satisfies PendingActivityTaskStartEvent; + + const pendingActivity2 = { + ...pendingActivityTaskStartEvent, + computedEventId: 'pending-10', + 
pendingActivityTaskStartEventAttributes: { + ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, + scheduleId: '10', + }, + } as const satisfies PendingActivityTaskStartEvent; + + // Add pending activity that won't have a group + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivity1], + pendingStartDecision: null, + }); + + // Update with different pending activity (old one should be removed from buffer) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivity2], + pendingStartDecision: null, + }); + + // No groups should exist + expect(Object.keys(grouper.getGroups()).length).toBe(0); + }); + + it('should clear buffer on reset', async () => { + const { grouper, onChange } = createGrouper(); + + // Add pending activity without scheduled event (will be buffered) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Reset the grouper + grouper.reset(); + + // Add scheduled event after reset + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Group should only have scheduled event (buffered pending was cleared) + const groups = grouper.getGroups(); + const activityGroup = groups['7'] as ActivityHistoryGroup; + expect(activityGroup.events).toHaveLength(1); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + }); + + it('should apply buffered pending events after updatePendingEvents if groups now exist', async () => { + const { grouper, onChange } = createGrouper(); + + // Add pending activity BEFORE scheduled event (will be buffered) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // No group yet + expect(grouper.getGroups()['7']).toBeUndefined(); + + // Process scheduled event + 
grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Call updatePendingEvents again with same pending activity + // This should trigger applyBufferedPendingEvents and merge the buffered event + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Group should now have both events + const groups = grouper.getGroups(); + const activityGroup = groups['7'] as ActivityHistoryGroup; + expect(activityGroup.events).toHaveLength(2); + }); + + it('should handle scenario where scheduled event arrives after pending event update', async () => { + const { grouper, onChange } = createGrouper(); + + // Step 1: Pending activity arrives first (buffered) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Step 2: Scheduled event arrives + grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Step 3: Another updatePendingEvents call (maybe with different pending events) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Should have complete group with both events + const groups = grouper.getGroups(); + const activityGroup = groups['7'] as ActivityHistoryGroup; + expect(activityGroup).toBeDefined(); + expect(activityGroup.events).toHaveLength(2); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + expect(activityGroup.events[1].attributes).toBe( + 'pendingActivityTaskStartEventAttributes' + ); + }); + + it('should not create incomplete groups when pending arrives before scheduled', async () => { + const { grouper } = createGrouper(); + + // Only add pending activity (no scheduled event) + await grouper.updatePendingEvents({ + pendingStartActivities: 
[pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); + + // Group should NOT exist in the UI + const groups = grouper.getGroups(); + expect(groups['7']).toBeUndefined(); + expect(Object.keys(groups).length).toBe(0); + }); + + it('should handle pending decision buffer clearing when decision changes', async () => { + const { grouper } = createGrouper(); + + const pendingDecision1 = { + ...pendingDecisionTaskStartEvent, + computedEventId: 'pending-7', + } as const satisfies PendingDecisionTaskStartEvent; + + const pendingDecision2 = { + ...pendingDecisionTaskStartEvent, + computedEventId: 'pending-10', + pendingDecisionTaskStartEventAttributes: { + ...pendingDecisionTaskStartEvent.pendingDecisionTaskStartEventAttributes, + scheduleId: '10', + }, + } as const satisfies PendingDecisionTaskStartEvent; + + // Buffer first decision + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecision1, + }); + + // Update with different decision (old one should be removed from buffer) + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecision2, + }); + + // No groups should exist + expect(Object.keys(grouper.getGroups()).length).toBe(0); + }); + }); + + describe('decision group filtering', () => { + it('should filter out pending decision when decision group has more than 2 events', async () => { + const { grouper, onChange } = createGrouper(); + + // Add scheduled event and pending decision + grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecisionForScheduledEvent, + }); + + // Group should have 2 events (scheduled + pending) + let groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(2); + + // Now add started event (makes it 3 events total) + onChange.mockClear(); + grouper.updateEvents([ + 
scheduleDecisionTaskEvent, + startDecisionTaskEvent, + ] as HistoryEvent[]); + await waitForProcessing(onChange); + + // Pending decision should be filtered out when there are more than 2 events + groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(2); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(false); + + // Add completed event (makes it 3+ events) + onChange.mockClear(); + grouper.updateEvents(completedDecisionTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + // Still should not have pending decision + groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(3); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(false); + }); + + it('should keep pending decision when decision group has exactly 2 events', async () => { + const { grouper, onChange } = createGrouper(); + + // Add scheduled event and pending decision + grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); + await waitForProcessing(onChange); + + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: pendingDecisionForScheduledEvent, + }); + + // Group should have 2 events (scheduled + pending) + const groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(2); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(true); + }); + }); + + describe('groups shallow copy in onChange', () => { + it('should return shallow copy of groups object in onChange callback', async () => { + const { grouper, onChange } = createGrouper(); + + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + // Get groups from onChange callback + const lastCall = onChange.mock.calls[onChange.mock.calls.length - 1]; + const groupsFromCallback = 
lastCall[0].currentGroups; + + // Try to add a new group to the callback's groups object + groupsFromCallback['999'] = groupsFromCallback['7']; + + // Internal groups should not have the new group (shallow copy protects object structure) + const internalGroups = grouper.getGroups(); + expect(internalGroups['999']).toBeUndefined(); + }); + + it('should allow modification of group properties (shallow copy limitation)', async () => { + const { grouper, onChange } = createGrouper(); + + grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); + await waitForProcessing(onChange); + + // Get groups from onChange callback + const lastCall = onChange.mock.calls[onChange.mock.calls.length - 1]; + const groupsFromCallback = lastCall[0].currentGroups; + const originalLabel = groupsFromCallback['7'].label; + + // Modify a group's property - this WILL affect internal state (shallow copy limitation) + groupsFromCallback['7'].label = 'Modified Label'; + + // Internal groups ARE modified since group objects are shared references + const internalGroups = grouper.getGroups(); + expect(internalGroups['7'].label).toBe('Modified Label'); + expect(internalGroups['7'].label).not.toBe(originalLabel); + }); + }); +}); diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.ts b/src/views/workflow-history/helpers/workflow-history-grouper.ts new file mode 100644 index 000000000..51cb2638b --- /dev/null +++ b/src/views/workflow-history/helpers/workflow-history-grouper.ts @@ -0,0 +1,522 @@ +import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; +import logger from '@/utils/logger'; + +import type { + ExtendedActivityHistoryEvent, + ExtendedDecisionHistoryEvent, + HistoryEventsGroup, + HistoryEventsGroups, + PendingActivityTaskStartEvent, + PendingDecisionTaskStartEvent, +} from '../workflow-history.types'; + +import isChildWorkflowExecutionEvent from './check-history-event-group/is-child-workflow-execution-event'; +import 
isExtendedActivityEvent from './check-history-event-group/is-extended-activity-event'; +import isExtendedDecisionEvent from './check-history-event-group/is-extended-decision-event'; +import isRequestCancelExternalWorkflowExecutionEvent from './check-history-event-group/is-request-cancel-external-workflow-execution-event'; +import isSignalExternalWorkflowExecutionEvent from './check-history-event-group/is-signal-external-workflow-execution-event'; +import isSingleEvent from './check-history-event-group/is-single-event'; +import isTimerEvent from './check-history-event-group/is-timer-event'; +import getHistoryEventGroupId from './get-history-event-group-id'; +import getActivityGroupFromEvents from './get-history-group-from-events/get-activity-group-from-events'; +import getChildWorkflowExecutionGroupFromEvents from './get-history-group-from-events/get-child-workflow-execution-group-from-events'; +import getDecisionGroupFromEvents from './get-history-group-from-events/get-decision-group-from-events'; +import getRequestCancelExternalWorkflowExecutionGroupFromEvents from './get-history-group-from-events/get-request-cancel-external-workflow-execution-group-from-events'; +import getSignalExternalWorkflowExecutionGroupFromEvents from './get-history-group-from-events/get-signal-external-workflow-execution-group-from-events'; +import getSingleEventGroupFromEvents from './get-history-group-from-events/get-single-event-group-from-events'; +import getTimerGroupFromEvents from './get-history-group-from-events/get-timer-group-from-events'; +import placeEventInGroupEvents from './place-event-in-group-events'; +import { + type GroupingProcessState, + type ProcessEventsParams, + type Props, +} from './workflow-history-grouper.types'; + +/** + * Stateful history events grouper that processes events incrementally. + * + * This class maintains the state of processed events and groups, allowing + * efficient incremental updates as new events arrive. 
It tracks pending + * activities and decisions, automatically adding new ones and removing + * stale ones from groups. + */ +export default class WorkflowHistoryGrouper { + private allEvents: HistoryEvent[] = []; + private lastProcessedEventIndex: number = -1; + private groups: HistoryEventsGroups = {}; + private currentPendingActivities: PendingActivityTaskStartEvent[] = []; + private currentPendingDecision: PendingDecisionTaskStartEvent | null = null; + private onChange: (state: GroupingProcessState) => void; + private batchSize?: number; + private isProcessing: boolean = false; + + // Buffer for pending events that arrived before their group exists + private bufferedPendingActivities: PendingActivityTaskStartEvent[] = []; + private bufferedPendingDecision: PendingDecisionTaskStartEvent | null = null; + + constructor({ onChange, batchSize }: Props) { + this.onChange = onChange; + this.batchSize = batchSize; + } + + /** + * Updates the events list and automatically starts processing. + * The processor will continue batch by batch until all events are processed. + * If already processing, the new events will be queued and processed after current batch completes. + * Listen to onChange for progress updates. + * + */ + public updateEvents(events: HistoryEvent[]): void { + // Update allEvents with the latest complete list + this.allEvents = events; + + // If already processing, the loop will automatically pick up the new events + // No need to do anything - the pointer-based approach handles this + if (this.isProcessing) { + return; + } + + this.startProcessing(); + } + + /** + * Updates pending events (activities and decisions). + * This should be called separately from updateEvents. 
+ */ + public async updatePendingEvents(params: ProcessEventsParams) { + // Update pending events (add new ones, remove stale ones) + + const currentPendingActivities = this.currentPendingActivities; + const currentPendingDecision = this.currentPendingDecision; + + this.currentPendingActivities = params.pendingStartActivities; + this.currentPendingDecision = params.pendingStartDecision; + + this.bufferedPendingActivities = []; + this.bufferedPendingDecision = null; + + this.processPendingEvents( + currentPendingActivities, + params.pendingStartActivities, + currentPendingDecision, + params.pendingStartDecision + ); + } + + /** + * Resets the grouper state, clearing all processed events and groups. + * Useful for reprocessing events from scratch. + */ + public reset(): void { + this.allEvents = []; + this.lastProcessedEventIndex = -1; + this.groups = {}; + this.currentPendingActivities = []; + this.currentPendingDecision = null; + this.bufferedPendingActivities = []; + this.bufferedPendingDecision = null; + this.isProcessing = false; + } + + /** + * Gets the current groups without processing new events. + * + * @returns Current state of groups + */ + public getGroups(): HistoryEventsGroups { + return this.groups; + } + + /** + * Gets the index of the last processed event. + */ + public getLastProcessedEventIndex(): number { + return this.lastProcessedEventIndex; + } + + // ============================================================================ + // Private Implementation + // ============================================================================ + + /** + * Starts the processing cycle. + * Schedules the first batch - all batches go through the scheduler. 
+ */ + private startProcessing(): void { + // Check if there are events to process + if ( + this.isProcessing || + this.lastProcessedEventIndex >= this.allEvents.length - 1 + ) { + return; + } + + this.isProcessing = true; + + // Schedule the first batch (and all subsequent batches will be scheduled too) + this.scheduleNextBatch(); + } + + /** + * Schedules the next batch using the best available API. + * Uses Scheduler API if available, otherwise falls back to Promise microtask. + */ + private scheduleNextBatch(): void { + // Check if Scheduler API is available + const useScheduler = true; + if ( + useScheduler && + typeof window !== 'undefined' && + 'scheduler' in window && + 'postTask' in (window.scheduler as any) + ) { + // Use Scheduler API with background priority for non-urgent work + (window.scheduler as any) + .postTask(() => this.processBatch(), { priority: 'background' }) + .catch(() => { + // Fallback if postTask fails + setTimeout(() => this.processBatch(), 0); + }); + } else { + // Fallback to Promise microtask + setTimeout(() => this.processBatch(), 0); + } + } + + /** + * Processes a single batch of events (or all remaining events if no batchSize). + * This method handles the core grouping logic and schedules itself for the next batch. + */ + private async processBatch(): Promise { + // Check if there are events to process + if (this.lastProcessedEventIndex >= this.allEvents.length - 1) { + this.isProcessing = false; + return; + } + + // Calculate batch boundaries + const batchStart = this.lastProcessedEventIndex + 1; + const batchEnd = + this.batchSize !== undefined && this.batchSize > 0 + ? 
Math.min(batchStart + this.batchSize, this.allEvents.length) + : this.allEvents.length; + + // Process this batch synchronously using indices (avoids array slicing) + this.groups = this.groupEvents(batchStart, batchEnd, this.groups); + + // After processing new events, try to apply any buffered pending events + // whose groups may now exist + this.applyBufferedPendingEvents(); + + // Move pointer forward + this.lastProcessedEventIndex = batchEnd - 1; + + // Calculate progress + const processedEventsCount = this.lastProcessedEventIndex + 1; + const remainingEventsCount = this.allEvents.length - processedEventsCount; + + // Report progress + this.onChange({ + currentGroups: { ...this.groups }, + processedEventsCount, + remainingEventsCount, + status: remainingEventsCount > 0 ? 'processing' : 'idle', + }); + + // Check if there are more events to process + if (this.lastProcessedEventIndex < this.allEvents.length - 1) { + this.scheduleNextBatch(); + } else { + // All done + this.isProcessing = false; + } + } + + /** + * Groups a batch of new events and updates existing groups. + * Synchronous implementation that processes events immediately. 
+ */ + private groupEvents( + startIndex: number, + endIndex: number, + existingGroups: HistoryEventsGroups + ): HistoryEventsGroups { + const groups = { ...existingGroups }; + + // Process new history events using indices (avoids array slicing) + for (let i = startIndex; i < endIndex; i++) { + const event = this.allEvents[i]; + const groupId = getHistoryEventGroupId(event); + if (!groupId) { + logger.warn( + { + eventId: event.eventId, + eventTime: event.eventTime, + }, + "Couldn't extract groupId from event, check event payload and extraction logic" + ); + continue; + } + + const defaultGroupDetails: Partial = { + events: [], + hasMissingEvents: false, + label: '', + }; + const currentGroup = groups[groupId] || defaultGroupDetails; + const updatedEventsArr = placeEventInGroupEvents( + event, + currentGroup.events + ); + + if (updatedEventsArr.every(isExtendedActivityEvent)) { + groups[groupId] = getActivityGroupFromEvents(updatedEventsArr); + } else if (updatedEventsArr.every(isExtendedDecisionEvent)) { + // If there are more than 2 decision events, filter out the pending decision task start event + // Pending decision task start event is only added to the group when the scheduled decision task event is added + // This logic can be moved later to getDecisionGroupFromEvents + const filteredDecisionEvents = + updatedEventsArr.length > 2 + ? 
updatedEventsArr.filter( + (e) => + e.attributes !== 'pendingDecisionTaskStartEventAttributes' + ) + : updatedEventsArr; + groups[groupId] = getDecisionGroupFromEvents(filteredDecisionEvents); + } else if (updatedEventsArr.every(isTimerEvent)) { + groups[groupId] = getTimerGroupFromEvents(updatedEventsArr); + } else if (updatedEventsArr.every(isChildWorkflowExecutionEvent)) { + groups[groupId] = + getChildWorkflowExecutionGroupFromEvents(updatedEventsArr); + } else if ( + updatedEventsArr.every(isSignalExternalWorkflowExecutionEvent) + ) { + groups[groupId] = + getSignalExternalWorkflowExecutionGroupFromEvents(updatedEventsArr); + } else if ( + updatedEventsArr.every(isRequestCancelExternalWorkflowExecutionEvent) + ) { + groups[groupId] = + getRequestCancelExternalWorkflowExecutionGroupFromEvents( + updatedEventsArr + ); + } else if (updatedEventsArr.every(isSingleEvent)) { + groups[groupId] = getSingleEventGroupFromEvents(updatedEventsArr); + } else { + logger.warn( + { + eventId: event.eventId, + eventTime: event.eventTime, + events: updatedEventsArr.map(({ eventId, eventTime }) => ({ + eventId, + eventTime, + })), + }, + 'No handler for grouping this event' + ); + } + } + + return groups; + } + + /** + * Adds a pending activity to a group, removing any existing pending activities first. + * Only adds the new pending activity if it has an eventTime. 
+ */ + private addPendingActivityToGroup( + groupId: string, + pendingActivity: PendingActivityTaskStartEvent + ) { + const currentGroup = this.groups[groupId]; + if (currentGroup && currentGroup.events.every(isExtendedActivityEvent)) { + const filteredEvents = currentGroup.events.filter( + (e) => e.attributes !== 'pendingActivityTaskStartEventAttributes' + ) as ExtendedActivityHistoryEvent[]; + + this.groups[groupId] = getActivityGroupFromEvents([ + ...filteredEvents, + pendingActivity as ExtendedActivityHistoryEvent, + ]); + } + } + + /** + * Adds a pending decision to a group, removing any existing pending decision first. + * Only adds if the group has exactly one scheduled event. + */ + private updatePendingDecisionInGroup( + groupId: string, + pendingDecision: PendingDecisionTaskStartEvent | null + ) { + const currentGroup = this.groups[groupId]; + if (currentGroup && currentGroup.events.every(isExtendedDecisionEvent)) { + // Remove any existing pending decision + const filteredEvents = currentGroup.events.filter( + (e) => e.attributes !== 'pendingDecisionTaskStartEventAttributes' + ) as ExtendedDecisionHistoryEvent[]; + + // Only add pending decision if group has exactly one scheduled event + if ( + pendingDecision && + filteredEvents.length === 1 && + filteredEvents[0].attributes === 'decisionTaskScheduledEventAttributes' + ) { + const updatedEventsArr: ExtendedDecisionHistoryEvent[] = [ + ...filteredEvents, + pendingDecision, + ]; + this.groups[groupId] = getDecisionGroupFromEvents(updatedEventsArr); + } else { + // Just update without pending decision + this.groups[groupId] = getDecisionGroupFromEvents(filteredEvents); + } + } + } + + /** + * Updates pending activities and decisions. 
+ */ + private processPendingEvents( + currentPendingActivities: PendingActivityTaskStartEvent[], + newPendingActivities: PendingActivityTaskStartEvent[], + currentPendingDecision: PendingDecisionTaskStartEvent | null, + newPendingDecision: PendingDecisionTaskStartEvent | null + ) { + this.updatePendingActivities( + currentPendingActivities, + newPendingActivities + ); + + this.updatePendingDecision(currentPendingDecision, newPendingDecision); + } + + /** + * Updates pending activities in groups by removing old ones and adding new ones. + * If a group doesn't exist yet, buffers the pending activity until the + * scheduled event arrives. + * Buffer is already cleared before this is called, so we're rebuilding from scratch. + */ + private updatePendingActivities( + currentPendingActivities: PendingActivityTaskStartEvent[], + newPendingActivities: PendingActivityTaskStartEvent[] + ): void { + const existingPendingGroups = new Set( + currentPendingActivities.map((pa) => getHistoryEventGroupId(pa)) + ); + // First, remove all current pending activities from their groups + currentPendingActivities.forEach((pa) => { + const groupId = getHistoryEventGroupId(pa); + if (groupId && existingPendingGroups.has(groupId)) { + const currentGroup = this.groups[groupId]; + if ( + currentGroup && + currentGroup.events.every(isExtendedActivityEvent) + ) { + const filteredEvents = currentGroup.events.filter( + (e) => e.attributes !== 'pendingActivityTaskStartEventAttributes' + ); + + this.groups[groupId] = getActivityGroupFromEvents(filteredEvents); + } + } + }); + + // Then, add all new pending activities to their groups (or buffer them) + newPendingActivities.forEach((pa) => { + const groupId = getHistoryEventGroupId(pa); + if (!groupId) { + logger.warn( + { + computedEventId: pa.computedEventId, + eventTime: pa.eventTime, + }, + "Couldn't extract groupId from pending activity event" + ); + return; + } + + if (this.groups[groupId]) { + this.addPendingActivityToGroup(groupId, pa); + } 
else { + this.bufferedPendingActivities.push(pa); + } + }); + } + + /** + * Adds the current pending decision to groups. + * If the group doesn't exist yet, buffers the pending decision until the + * scheduled event arrives. + * Buffer was cleared before this is called, so we're rebuilding from scratch. + */ + private updatePendingDecision( + currentPendingDecision: PendingDecisionTaskStartEvent | null, + newPendingDecision: PendingDecisionTaskStartEvent | null + ): void { + // Remove old pending decision from its group (if exists) + if (currentPendingDecision) { + const groupId = getHistoryEventGroupId(currentPendingDecision); + if (groupId) { + this.updatePendingDecisionInGroup(groupId, null); + } + } + + // Add new pending decision (to group or buffer) + if (newPendingDecision) { + const groupId = getHistoryEventGroupId(newPendingDecision); + if (!groupId) { + logger.warn( + { + computedEventId: newPendingDecision.computedEventId, + eventTime: newPendingDecision.eventTime, + }, + "Couldn't extract groupId from pending decision event" + ); + return; + } + + if (this.groups[groupId]) { + this.updatePendingDecisionInGroup(groupId, newPendingDecision); + } else { + this.bufferedPendingDecision = newPendingDecision; + } + } + } + + /** + * Applies buffered pending events to groups when their scheduled events arrive. + * This is called after processing new events to merge any pending events + * that were waiting for their groups to be created. 
+ */ + private applyBufferedPendingEvents(): void { + // Apply buffered pending activities + const activitiesToKeepBuffered: PendingActivityTaskStartEvent[] = []; + + this.bufferedPendingActivities.forEach((activity) => { + const groupId = getHistoryEventGroupId(activity); + if (groupId && this.groups[groupId]) { + this.addPendingActivityToGroup(groupId, activity); + } else { + activitiesToKeepBuffered.push(activity); + } + }); + + this.bufferedPendingActivities = activitiesToKeepBuffered; + + // Apply buffered pending decision + if (this.bufferedPendingDecision) { + const groupId = getHistoryEventGroupId(this.bufferedPendingDecision); + if (groupId) { + // Try to add to existing group using helper + if (this.groups[groupId]) { + this.updatePendingDecisionInGroup( + groupId, + this.bufferedPendingDecision + ); + this.bufferedPendingDecision = null; + } + } + } + } +} diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts new file mode 100644 index 000000000..1674624dd --- /dev/null +++ b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts @@ -0,0 +1,57 @@ +import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; + +import type { + HistoryEventsGroups, + PendingActivityTaskStartEvent, + PendingDecisionTaskStartEvent, +} from '../workflow-history.types'; + +export type ProcessEventsParams = { + pendingStartActivities: PendingActivityTaskStartEvent[]; + pendingStartDecision: PendingDecisionTaskStartEvent | null; +}; + +export type ProcessEventsResult = { + groups: HistoryEventsGroups; + lastProcessedEventIndex: number; +}; + +/** + * Processing status for incremental grouping operations. + */ +export type ProcessingStatus = 'idle' | 'processing' | 'error'; + +/** + * State snapshot of the grouping process. 
+ */ +export type GroupingProcessState = { + /** Current groups accumulated so far */ + currentGroups: HistoryEventsGroups; + /** Number of events that have been successfully processed since the grouper was created/reset */ + processedEventsCount: number; + /** Number of events that are still pending (not yet processed) */ + remainingEventsCount: number; + /** Current processing status */ + status: ProcessingStatus; +}; + +/** + * Callback invoked when grouping state changes. + */ +export type GroupingStateChangeCallback = (state: GroupingProcessState) => void; + +export type Props = { + /** + * Callback invoked when grouping state changes. + * Provides real-time updates on processing progress. + * Required to receive state updates. + */ + onChange: GroupingStateChangeCallback; + + /** + * Batch size for incremental processing. + * If specified, events will be processed in batches to allow progress updates. + * If not specified, all events are processed at once. + */ + batchSize?: number; +}; From ca4f4323963107a1b59d49d5836a39d052c6bb89 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Wed, 5 Nov 2025 23:29:48 +0100 Subject: [PATCH 08/58] update test cases Signed-off-by: Assem Hafez --- .../workflow-history-pending-events.ts | 54 + .../workflow-history-grouper.test.tsx | 1164 ++++++++--------- .../helpers/workflow-history-grouper.ts | 7 +- .../helpers/workflow-history-grouper.types.ts | 2 +- 4 files changed, 604 insertions(+), 623 deletions(-) diff --git a/src/views/workflow-history/__fixtures__/workflow-history-pending-events.ts b/src/views/workflow-history/__fixtures__/workflow-history-pending-events.ts index d08e326de..646bad461 100644 --- a/src/views/workflow-history/__fixtures__/workflow-history-pending-events.ts +++ b/src/views/workflow-history/__fixtures__/workflow-history-pending-events.ts @@ -3,6 +3,9 @@ import type { PendingDecisionTaskStartEvent, } from '../workflow-history.types'; +import { scheduleActivityTaskEvent } from 
'./workflow-history-activity-events'; +import { scheduleDecisionTaskEvent } from './workflow-history-decision-events'; + export const pendingActivityTaskStartEvent = { eventId: null, computedEventId: 'pending-7', @@ -94,3 +97,54 @@ export const pendingDecisionTaskStartEventWithStartedState = { }, }, } as const satisfies PendingDecisionTaskStartEvent; + +// Factory functions for creating test data dynamically + +export function createPendingActivity( + scheduleId: string, + options?: { activityId?: string } +): PendingActivityTaskStartEvent { + return { + ...pendingActivityTaskStartEvent, + computedEventId: `pending-${scheduleId}`, + pendingActivityTaskStartEventAttributes: { + ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, + scheduleId, + ...(options?.activityId && { activityId: options.activityId }), + }, + } as PendingActivityTaskStartEvent; +} + +export function createPendingDecision( + scheduleId: string +): PendingDecisionTaskStartEvent { + return { + ...pendingDecisionTaskStartEvent, + computedEventId: `pending-${scheduleId}`, + pendingDecisionTaskStartEventAttributes: { + ...pendingDecisionTaskStartEvent.pendingDecisionTaskStartEventAttributes, + scheduleId, + }, + } as PendingDecisionTaskStartEvent; +} + +export function createScheduleActivityEvent( + eventId: string, + options?: { activityId?: string } +) { + return { + ...scheduleActivityTaskEvent, + eventId, + activityTaskScheduledEventAttributes: { + ...scheduleActivityTaskEvent.activityTaskScheduledEventAttributes, + ...(options?.activityId && { activityId: options.activityId }), + }, + }; +} + +export function createScheduleDecisionEvent(eventId: string) { + return { + ...scheduleDecisionTaskEvent, + eventId, + }; +} diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx index 706a7e99f..231644afb 100644 --- 
a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx @@ -1,718 +1,648 @@ -import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; - import { completedActivityTaskEvents, - scheduleActivityTaskEvent, startActivityTaskEvent, } from '../../__fixtures__/workflow-history-activity-events'; +import { startDecisionTaskEvent } from '../../__fixtures__/workflow-history-decision-events'; import { - completedDecisionTaskEvents, - scheduleDecisionTaskEvent, - startDecisionTaskEvent, -} from '../../__fixtures__/workflow-history-decision-events'; -import { + createPendingActivity, + createPendingDecision, + createScheduleActivityEvent, + createScheduleDecisionEvent, pendingActivityTaskStartEvent, - pendingDecisionTaskStartEvent, } from '../../__fixtures__/workflow-history-pending-events'; -import type { - ActivityHistoryGroup, - PendingActivityTaskStartEvent, - PendingDecisionTaskStartEvent, -} from '../../workflow-history.types'; import WorkflowHistoryGrouper from '../workflow-history-grouper'; -import type { Props } from '../workflow-history-grouper.types'; - -// Create pending decision that matches the scheduleDecisionTaskEvent (eventId: '2') -const pendingDecisionForScheduledEvent = { - ...pendingDecisionTaskStartEvent, - computedEventId: 'pending-2', - pendingDecisionTaskStartEventAttributes: { - ...pendingDecisionTaskStartEvent.pendingDecisionTaskStartEventAttributes, - scheduleId: '2', - }, -} as const satisfies PendingDecisionTaskStartEvent; - -// Helper to create a grouper with a mock onChange -function createGrouper(options: Partial = {}) { - const onChange = jest.fn(); - const grouper = new WorkflowHistoryGrouper({ - onChange, - ...options, - }); - return { grouper, onChange }; -} +import type { + GroupingStateChangeCallback, + Props, +} from '../workflow-history-grouper.types'; -// Helper to wait for processing to 
complete -function waitForProcessing(onChange: jest.Mock, timeout = 5000): Promise { - return new Promise((resolve, reject) => { - const startTime = Date.now(); - - const checkComplete = () => { - // Check if onChange was called with status 'idle' or 'error' - const lastCall = onChange.mock.calls[onChange.mock.calls.length - 1]; - if ( - lastCall && - (lastCall[0].status === 'idle' || lastCall[0].status === 'error') - ) { - resolve(); - return; - } +// Commonly used mocks - // Check timeout - if (Date.now() - startTime > timeout) { - reject(new Error('Timeout waiting for processing to complete')); - return; - } +// Track all setups for cleanup +const allCleanups: Array<() => void> = []; - // Check again soon - setTimeout(checkComplete, 10); - }; +describe(WorkflowHistoryGrouper.name, () => { + afterEach(async () => { + // Clean up any pending timeouts from all tests + allCleanups.forEach((cleanup) => cleanup()); + allCleanups.length = 0; - // Start checking after a short delay to let updateEvents kick off - setTimeout(checkComplete, 10); + // Give time for any pending async operations to complete + await new Promise((resolve) => setTimeout(resolve, 10)); }); -} -describe(WorkflowHistoryGrouper.name, () => { - describe('basic event processing', () => { - it('should process events and create groups', async () => { - const { grouper, onChange } = createGrouper(); - - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); - - const groups = grouper.getGroups(); - expect(groups).toBeDefined(); - expect(groups['7']).toBeDefined(); - expect(groups['7'].groupType).toBe('Activity'); - expect(grouper.getLastProcessedEventIndex()).toBe(2); - }); + it('should process events and create groups', async () => { + const { grouper, waitForProcessing } = setup(); - it('should not reprocess events on subsequent calls with same events', async () => { - const { grouper, onChange } = createGrouper(); + 
grouper.updateEvents(completedActivityTaskEvents); + await waitForProcessing(); - // First call - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + const groups = grouper.getGroups(); + expect(groups).toBeDefined(); + expect(groups['7']).toBeDefined(); + expect(groups['7'].groupType).toBe('Activity'); + expect(grouper.getLastProcessedEventIndex()).toBe(2); + }); - const initialGroups = grouper.getGroups(); - const initialIndex = grouper.getLastProcessedEventIndex(); + it('should have getLastProcessedEventIndex pointing to the last processed event', async () => { + const { grouper, waitForProcessing } = setup(); + expect(grouper.getLastProcessedEventIndex()).toBe(-1); - // Second call with same events - should not trigger processing - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - // Give it a moment, but don't wait for onChange since nothing should happen - await new Promise((resolve) => setTimeout(resolve, 50)); + // First call with partial events + grouper.updateEvents([ + completedActivityTaskEvents[0], + completedActivityTaskEvents[1], + ]); + await waitForProcessing(); - expect(grouper.getGroups()).toEqual(initialGroups); - expect(grouper.getLastProcessedEventIndex()).toBe(initialIndex); - }); + expect(grouper.getLastProcessedEventIndex()).toBe(1); - it('should process only new events on subsequent calls', async () => { - const { grouper, onChange } = createGrouper(); + // Second call with all events + grouper.updateEvents(completedActivityTaskEvents); + await waitForProcessing(); - // First call with partial events - grouper.updateEvents([ - scheduleActivityTaskEvent, - startActivityTaskEvent, - ] as HistoryEvent[]); - await waitForProcessing(onChange); + expect(grouper.getLastProcessedEventIndex()).toBe(2); + }); - expect(grouper.getLastProcessedEventIndex()).toBe(1); + it('should add new pending activities to groups', async () => { + const { grouper, waitForProcessing } = 
setup(); - // Second call with all events - onChange.mockClear(); - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + // First call with scheduled event only + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); - const groups = grouper.getGroups(); - expect(grouper.getLastProcessedEventIndex()).toBe(2); - expect(groups['7']).toBeDefined(); - expect((groups['7'] as ActivityHistoryGroup).events).toHaveLength(3); + // Update with pending activity + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); + + const groups = grouper.getGroups(); + const activityGroup = groups['7']; + expect(activityGroup.events).toHaveLength(2); + expect(activityGroup.events[1].attributes).toBe( + 'pendingActivityTaskStartEventAttributes' + ); }); - describe('pending activities management', () => { - it('should add new pending activities to groups', async () => { - const { grouper, onChange } = createGrouper(); - - // First call with scheduled event only - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Update with pending activity - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - const groups = grouper.getGroups(); - const activityGroup = groups['7'] as ActivityHistoryGroup; - expect(activityGroup.events).toHaveLength(2); - expect(activityGroup.events[1].attributes).toBe( - 'pendingActivityTaskStartEventAttributes' - ); + it('should add new pending decision to groups', async () => { + const { grouper, waitForProcessing } = setup(); + + // First call with scheduled event only + grouper.updateEvents([createScheduleDecisionEvent('2')]); + await waitForProcessing(); + + // Add pending decision + await grouper.updatePendingEvents({ + pendingStartActivities: [], + 
pendingStartDecision: createPendingDecision('2'), }); - it('should remove stale pending activities from groups', async () => { - const { grouper, onChange } = createGrouper(); - - // First call with pending activity - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - const firstGroups = grouper.getGroups(); - const firstActivityGroup = firstGroups['7'] as ActivityHistoryGroup; - expect(firstActivityGroup.events).toHaveLength(2); - - // Second call without pending activity (it completed) - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: null, - }); - - const groups = grouper.getGroups(); - const activityGroup = groups['7'] as ActivityHistoryGroup; - expect(activityGroup.events).toHaveLength(1); - expect(activityGroup.events[0].attributes).toBe( - 'activityTaskScheduledEventAttributes' - ); + const decisionGroup = grouper.getGroups()['2']; + expect(decisionGroup.groupType).toBe('Decision'); + expect(decisionGroup.events).toHaveLength(2); + }); + + it('should remove stale pending activities from groups', async () => { + const { grouper, waitForProcessing } = setup(); + + // First call with pending activity + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); + + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); - it('should handle multiple pending activity state transitions', async () => { - const { grouper, onChange } = createGrouper(); - - // Initial state - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Add pending activity - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); 
- - // Remove pending activity (it started) - onChange.mockClear(); - grouper.updateEvents([ - scheduleActivityTaskEvent, - startActivityTaskEvent, - ] as HistoryEvent[]); - await waitForProcessing(onChange); - - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: null, - }); - - const activityGroup = grouper.getGroups()['7'] as ActivityHistoryGroup; - expect(activityGroup.events).toHaveLength(2); - expect( - activityGroup.events.some( - (e) => e.attributes === 'pendingActivityTaskStartEventAttributes' - ) - ).toBe(false); + const firstGroups = grouper.getGroups(); + const firstActivityGroup = firstGroups['7']; + expect(firstActivityGroup.events).toHaveLength(2); + + // Second call without pending activity (it completed) + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: null, }); + + const groups = grouper.getGroups(); + const activityGroup = groups['7']; + expect(activityGroup.events).toHaveLength(1); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); }); - describe('pending decision management', () => { - it('should add new pending decision to groups', async () => { - const { grouper, onChange } = createGrouper(); + it('should remove stale pending decision from groups', async () => { + const { grouper, waitForProcessing } = setup(); - // First call with scheduled event only - grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); + // First call with pending decision + grouper.updateEvents([createScheduleDecisionEvent('2')]); + await waitForProcessing(); - // Add pending decision - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecisionForScheduledEvent, - }); + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('2'), + }); + + const firstGroups = grouper.getGroups(); + 
expect(firstGroups['2'].events).toHaveLength(2); - const decisionGroup = grouper.getGroups()['2']; - expect(decisionGroup.groupType).toBe('Decision'); - expect(decisionGroup.events).toHaveLength(2); + // Second call without pending decision (it completed) + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: null, }); - it('should remove stale pending decision from groups', async () => { - const { grouper, onChange } = createGrouper(); + const decisionGroup = grouper.getGroups()['2']; + expect(decisionGroup.events).toHaveLength(1); + }); - // First call with pending decision - grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); + it('should handle multiple pending activity state transitions', async () => { + const { grouper, waitForProcessing } = setup(); - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecisionForScheduledEvent, - }); + // Initial state + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); - const firstGroups = grouper.getGroups(); - expect(firstGroups['2'].events).toHaveLength(2); + // Add pending activity + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }); - // Second call without pending decision (it completed) - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: null, - }); + // Remove pending activity (it started) + grouper.updateEvents([ + createScheduleActivityEvent('7'), + startActivityTaskEvent, + ]); + await waitForProcessing(); - const decisionGroup = grouper.getGroups()['2']; - expect(decisionGroup.events).toHaveLength(1); + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: null, }); + + const activityGroup = grouper.getGroups()['7']; + expect(activityGroup.events).toHaveLength(2); + expect( + 
activityGroup.events.some( + (e) => e.attributes === 'pendingActivityTaskStartEventAttributes' + ) + ).toBe(false); }); - describe('state management', () => { - it('should track last processed event index correctly', () => { - const { grouper } = createGrouper(); + it('should return current groups without processing', () => { + const { grouper } = setup(); - expect(grouper.getLastProcessedEventIndex()).toBe(-1); - }); + const groups = grouper.getGroups(); - it('should return current groups without processing', () => { - const { grouper } = createGrouper(); + expect(groups).toEqual({}); + }); - const groups = grouper.getGroups(); + it('should reset grouper state', async () => { + const { grouper, waitForProcessing } = setup(); - expect(groups).toEqual({}); - }); + // Process some events + grouper.updateEvents(completedActivityTaskEvents); + await waitForProcessing(); - it('should reset grouper state', async () => { - const { grouper, onChange } = createGrouper(); + expect(grouper.getLastProcessedEventIndex()).toBe(2); + expect(Object.keys(grouper.getGroups()).length).toBeGreaterThan(0); - // Process some events - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + // Reset + grouper.reset(); - expect(grouper.getLastProcessedEventIndex()).toBe(2); - expect(Object.keys(grouper.getGroups()).length).toBeGreaterThan(0); + expect(grouper.getLastProcessedEventIndex()).toBe(-1); + expect(grouper.getGroups()).toEqual({}); + }); - // Reset - grouper.reset(); + it('should reprocess events after reset', async () => { + const { grouper, waitForProcessing } = setup(); - expect(grouper.getLastProcessedEventIndex()).toBe(-1); - expect(grouper.getGroups()).toEqual({}); - }); + // Process events + grouper.updateEvents(completedActivityTaskEvents); + await waitForProcessing(); - it('should reprocess events after reset', async () => { - const { grouper, onChange } = createGrouper(); + const firstGroups = grouper.getGroups(); - // 
Process events - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + // Reset and reprocess + grouper.reset(); + expect(grouper.getGroups()).toEqual({}); - const firstGroups = grouper.getGroups(); + grouper.updateEvents(completedActivityTaskEvents); + await waitForProcessing(); - // Reset and reprocess - grouper.reset(); - onChange.mockClear(); - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + expect(grouper.getGroups()).toEqual(firstGroups); + }); - expect(grouper.getGroups()).toEqual(firstGroups); + it('should buffer pending activity when group does not exist yet', async () => { + const { grouper, waitForProcessing } = setup(); + + // Add pending activity BEFORE scheduled event exists + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); + + // Group should NOT exist yet (pending event is buffered) + let groups = grouper.getGroups(); + expect(groups['7']).toBeUndefined(); + + // Now add the scheduled event + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); + + // Group should now exist with both scheduled and pending events + groups = grouper.getGroups(); + const activityGroup = groups['7']; + expect(activityGroup).toBeDefined(); + expect(activityGroup.events).toHaveLength(2); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + expect(activityGroup.events[1].attributes).toBe( + 'pendingActivityTaskStartEventAttributes' + ); }); - describe('pending event buffering', () => { - it('should buffer pending activity when group does not exist yet', async () => { - const { grouper, onChange } = createGrouper(); - - // Add pending activity BEFORE scheduled event exists - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); 
- - // Group should NOT exist yet (pending event is buffered) - let groups = grouper.getGroups(); - expect(groups['7']).toBeUndefined(); - - // Now add the scheduled event - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Group should now exist with both scheduled and pending events - groups = grouper.getGroups(); - const activityGroup = groups['7'] as ActivityHistoryGroup; - expect(activityGroup).toBeDefined(); - expect(activityGroup.events).toHaveLength(2); - expect(activityGroup.events[0].attributes).toBe( - 'activityTaskScheduledEventAttributes' - ); - expect(activityGroup.events[1].attributes).toBe( - 'pendingActivityTaskStartEventAttributes' - ); - }); + it('should buffer pending decision when group does not exist yet', async () => { + const { grouper, waitForProcessing } = setup(); - it('should buffer pending decision when group does not exist yet', async () => { - const { grouper, onChange } = createGrouper(); - - // Add pending decision BEFORE scheduled event exists - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecisionForScheduledEvent, - }); - - // Group should NOT exist yet (pending event is buffered) - let groups = grouper.getGroups(); - expect(groups['2']).toBeUndefined(); - - // Now add the scheduled event - grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Group should now exist with both scheduled and pending events - groups = grouper.getGroups(); - const decisionGroup = groups['2']; - expect(decisionGroup).toBeDefined(); - expect(decisionGroup.groupType).toBe('Decision'); - expect(decisionGroup.events).toHaveLength(2); + // Add pending decision BEFORE scheduled event exists + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('2'), }); - it('should handle multiple buffered pending activities', async () => { - 
const { grouper, onChange } = createGrouper(); - - const pendingActivity1 = { - ...pendingActivityTaskStartEvent, - computedEventId: 'pending-7', - pendingActivityTaskStartEventAttributes: { - ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, - scheduleId: '7', - }, - } as const satisfies PendingActivityTaskStartEvent; - - const pendingActivity2 = { - ...pendingActivityTaskStartEvent, - computedEventId: 'pending-10', - pendingActivityTaskStartEventAttributes: { - ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, - scheduleId: '10', - activityId: '1', - }, - } as const satisfies PendingActivityTaskStartEvent; - - const scheduleEvent2 = { - ...scheduleActivityTaskEvent, - eventId: '10', - activityTaskScheduledEventAttributes: { - ...scheduleActivityTaskEvent.activityTaskScheduledEventAttributes, - activityId: '1', - }, - }; - - // Add multiple pending activities BEFORE their scheduled events - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivity1, pendingActivity2], - pendingStartDecision: null, - }); - - // No groups should exist yet - expect(Object.keys(grouper.getGroups()).length).toBe(0); - - // Add first scheduled event - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // First group should now exist - let groups = grouper.getGroups(); - expect(groups['7']).toBeDefined(); - expect(groups['10']).toBeUndefined(); - - // Add second scheduled event - onChange.mockClear(); - grouper.updateEvents([ - scheduleActivityTaskEvent, - scheduleEvent2, - ] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Both groups should now exist - groups = grouper.getGroups(); - expect(groups['7']).toBeDefined(); - expect(groups['10']).toBeDefined(); - expect((groups['7'] as ActivityHistoryGroup).events).toHaveLength(2); - expect((groups['10'] as ActivityHistoryGroup).events).toHaveLength(2); - }); + // Group should NOT exist yet (pending 
event is buffered) + let groups = grouper.getGroups(); + expect(groups['2']).toBeUndefined(); + + // Now add the scheduled event + grouper.updateEvents([createScheduleDecisionEvent('2')]); + await waitForProcessing(); - it('should clear buffer when pending events are updated', async () => { - const { grouper } = createGrouper(); - - const pendingActivity1 = { - ...pendingActivityTaskStartEvent, - computedEventId: 'pending-7', - } as const satisfies PendingActivityTaskStartEvent; - - const pendingActivity2 = { - ...pendingActivityTaskStartEvent, - computedEventId: 'pending-10', - pendingActivityTaskStartEventAttributes: { - ...pendingActivityTaskStartEvent.pendingActivityTaskStartEventAttributes, - scheduleId: '10', - }, - } as const satisfies PendingActivityTaskStartEvent; - - // Add pending activity that won't have a group - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivity1], - pendingStartDecision: null, - }); - - // Update with different pending activity (old one should be removed from buffer) - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivity2], - pendingStartDecision: null, - }); - - // No groups should exist - expect(Object.keys(grouper.getGroups()).length).toBe(0); + // Group should now exist with both scheduled and pending events + groups = grouper.getGroups(); + const decisionGroup = groups['2']; + expect(decisionGroup).toBeDefined(); + expect(decisionGroup.groupType).toBe('Decision'); + expect(decisionGroup.events).toHaveLength(2); + }); + + it('should handle multiple buffered pending activities', async () => { + const { grouper, waitForProcessing } = setup(); + + // Add multiple pending activities BEFORE their scheduled events + await grouper.updatePendingEvents({ + pendingStartActivities: [ + createPendingActivity('7'), + createPendingActivity('10', { activityId: '1' }), + ], + pendingStartDecision: null, }); - it('should clear buffer on reset', async () => { - const { grouper, onChange } = 
createGrouper(); - - // Add pending activity without scheduled event (will be buffered) - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - // Reset the grouper - grouper.reset(); - - // Add scheduled event after reset - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Group should only have scheduled event (buffered pending was cleared) - const groups = grouper.getGroups(); - const activityGroup = groups['7'] as ActivityHistoryGroup; - expect(activityGroup.events).toHaveLength(1); - expect(activityGroup.events[0].attributes).toBe( - 'activityTaskScheduledEventAttributes' - ); + // No groups should exist yet + expect(Object.keys(grouper.getGroups()).length).toBe(0); + + // Add first scheduled event + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); + + // First group should now exist + let groups = grouper.getGroups(); + expect(groups['7']).toBeDefined(); + expect(groups['10']).toBeUndefined(); + + // Add second scheduled event + grouper.updateEvents([ + createScheduleActivityEvent('7'), + createScheduleActivityEvent('10', { activityId: '1' }), + ]); + await waitForProcessing(); + + // Both groups should now exist + groups = grouper.getGroups(); + expect(groups['7']).toBeDefined(); + expect(groups['10']).toBeDefined(); + expect(groups['7'].events).toHaveLength(2); + expect(groups['10'].events).toHaveLength(2); + }); + + it('should clear pending activities buffer when pending events are updated', async () => { + const { grouper, waitForProcessing } = setup(); + + // Buffer first pending activity for scheduleId: '7' + await grouper.updatePendingEvents({ + pendingStartActivities: [createPendingActivity('7')], + pendingStartDecision: null, }); - it('should apply buffered pending events after updatePendingEvents if groups now exist', async () => { - const { grouper, onChange } = 
createGrouper(); - - // Add pending activity BEFORE scheduled event (will be buffered) - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - // No group yet - expect(grouper.getGroups()['7']).toBeUndefined(); - - // Process scheduled event - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Call updatePendingEvents again with same pending activity - // This should trigger applyBufferedPendingEvents and merge the buffered event - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - // Group should now have both events - const groups = grouper.getGroups(); - const activityGroup = groups['7'] as ActivityHistoryGroup; - expect(activityGroup.events).toHaveLength(2); + // Update with different pending activity for scheduleId: '10' (old one should be removed from buffer) + await grouper.updatePendingEvents({ + pendingStartActivities: [ + createPendingActivity('10', { activityId: '1' }), + ], + pendingStartDecision: null, }); - it('should handle scenario where scheduled event arrives after pending event update', async () => { - const { grouper, onChange } = createGrouper(); - - // Step 1: Pending activity arrives first (buffered) - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - // Step 2: Scheduled event arrives - grouper.updateEvents([scheduleActivityTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Step 3: Another updatePendingEvents call (maybe with different pending events) - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); - - // Should have complete group with both events - const groups = grouper.getGroups(); - const activityGroup = groups['7'] as 
ActivityHistoryGroup; - expect(activityGroup).toBeDefined(); - expect(activityGroup.events).toHaveLength(2); - expect(activityGroup.events[0].attributes).toBe( - 'activityTaskScheduledEventAttributes' - ); - expect(activityGroup.events[1].attributes).toBe( - 'pendingActivityTaskStartEventAttributes' - ); + // No groups should exist yet (still buffered) + expect(Object.keys(grouper.getGroups()).length).toBe(0); + + // Now add scheduled events for both activities + grouper.updateEvents([ + createScheduleActivityEvent('7'), // scheduleId: '7' + createScheduleActivityEvent('10', { activityId: '1' }), // scheduleId: '10' + ]); + await waitForProcessing(); + + const groups = grouper.getGroups(); + + // Group '7' should only have scheduled event (pending was cleared from buffer) + expect(groups['7']).toBeDefined(); + expect(groups['7'].events).toHaveLength(1); + expect( + groups['7'].events.some( + (e) => e.attributes === 'pendingActivityTaskStartEventAttributes' + ) + ).toBe(false); + + // Group '10' should have both scheduled and pending events (current pending in buffer) + expect(groups['10']).toBeDefined(); + expect(groups['10'].events).toHaveLength(2); + expect( + groups['10'].events.some( + (e) => e.attributes === 'pendingActivityTaskStartEventAttributes' + ) + ).toBe(true); + }); + + it('should clear buffer on reset', async () => { + const { grouper, waitForProcessing } = setup(); + + // Add pending activity without scheduled event (will be buffered) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); - it('should not create incomplete groups when pending arrives before scheduled', async () => { - const { grouper } = createGrouper(); + // Reset the grouper + grouper.reset(); + + // Add scheduled event after reset + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); + + // Group should only have scheduled event (buffered pending was cleared) + const 
groups = grouper.getGroups(); + const activityGroup = groups['7']; + expect(activityGroup.events).toHaveLength(1); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + }); - // Only add pending activity (no scheduled event) - await grouper.updatePendingEvents({ - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }); + it('should apply buffered pending events after updatePendingEvents if groups now exist', async () => { + const { grouper, waitForProcessing } = setup(); - // Group should NOT exist in the UI - const groups = grouper.getGroups(); - expect(groups['7']).toBeUndefined(); - expect(Object.keys(groups).length).toBe(0); + // Add pending activity BEFORE scheduled event (will be buffered) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); - it('should handle pending decision buffer clearing when decision changes', async () => { - const { grouper } = createGrouper(); - - const pendingDecision1 = { - ...pendingDecisionTaskStartEvent, - computedEventId: 'pending-7', - } as const satisfies PendingDecisionTaskStartEvent; - - const pendingDecision2 = { - ...pendingDecisionTaskStartEvent, - computedEventId: 'pending-10', - pendingDecisionTaskStartEventAttributes: { - ...pendingDecisionTaskStartEvent.pendingDecisionTaskStartEventAttributes, - scheduleId: '10', - }, - } as const satisfies PendingDecisionTaskStartEvent; - - // Buffer first decision - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecision1, - }); - - // Update with different decision (old one should be removed from buffer) - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecision2, - }); - - // No groups should exist - expect(Object.keys(grouper.getGroups()).length).toBe(0); + // No group yet + expect(grouper.getGroups()['7']).toBeUndefined(); 
+ + // Process scheduled event + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); + + // Call updatePendingEvents again with same pending activity + // This should trigger applyBufferedPendingEvents and merge the buffered event + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); + + // Group should now have both events + const groups = grouper.getGroups(); + const activityGroup = groups['7']; + expect(activityGroup.events).toHaveLength(2); }); - describe('decision group filtering', () => { - it('should filter out pending decision when decision group has more than 2 events', async () => { - const { grouper, onChange } = createGrouper(); - - // Add scheduled event and pending decision - grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecisionForScheduledEvent, - }); - - // Group should have 2 events (scheduled + pending) - let groups = grouper.getGroups(); - expect(groups['2'].events).toHaveLength(2); - - // Now add started event (makes it 3 events total) - onChange.mockClear(); - grouper.updateEvents([ - scheduleDecisionTaskEvent, - startDecisionTaskEvent, - ] as HistoryEvent[]); - await waitForProcessing(onChange); - - // Pending decision should be filtered out when there are more than 2 events - groups = grouper.getGroups(); - expect(groups['2'].events).toHaveLength(2); - expect( - groups['2'].events.some( - (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' - ) - ).toBe(false); - - // Add completed event (makes it 3+ events) - onChange.mockClear(); - grouper.updateEvents(completedDecisionTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); - - // Still should not have pending decision - groups = grouper.getGroups(); - expect(groups['2'].events).toHaveLength(3); - 
expect( - groups['2'].events.some( - (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' - ) - ).toBe(false); + it('should handle scenario where scheduled event arrives after pending event update', async () => { + const { grouper, waitForProcessing } = setup(); + + // Step 1: Pending activity arrives first (buffered) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, }); - it('should keep pending decision when decision group has exactly 2 events', async () => { - const { grouper, onChange } = createGrouper(); - - // Add scheduled event and pending decision - grouper.updateEvents([scheduleDecisionTaskEvent] as HistoryEvent[]); - await waitForProcessing(onChange); - - await grouper.updatePendingEvents({ - pendingStartActivities: [], - pendingStartDecision: pendingDecisionForScheduledEvent, - }); - - // Group should have 2 events (scheduled + pending) - const groups = grouper.getGroups(); - expect(groups['2'].events).toHaveLength(2); - expect( - groups['2'].events.some( - (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' - ) - ).toBe(true); + // Step 2: Scheduled event arrives + grouper.updateEvents([createScheduleActivityEvent('7')]); + await waitForProcessing(); + + // Should have complete group with both events + const groups = grouper.getGroups(); + const activityGroup = groups['7']; + expect(activityGroup).toBeDefined(); + expect(activityGroup.events).toHaveLength(2); + expect(activityGroup.events[0].attributes).toBe( + 'activityTaskScheduledEventAttributes' + ); + expect(activityGroup.events[1].attributes).toBe( + 'pendingActivityTaskStartEventAttributes' + ); + }); + + it('should not create incomplete groups when pending arrives before scheduled', async () => { + const { grouper } = setup(); + + // Only add pending activity (no scheduled event) + await grouper.updatePendingEvents({ + pendingStartActivities: [pendingActivityTaskStartEvent], + 
pendingStartDecision: null, }); + + // Group should NOT exist in the UI + const groups = grouper.getGroups(); + expect(groups['7']).toBeUndefined(); + expect(Object.keys(groups).length).toBe(0); }); - describe('groups shallow copy in onChange', () => { - it('should return shallow copy of groups object in onChange callback', async () => { - const { grouper, onChange } = createGrouper(); + it('should handle pending decision buffer clearing when decision changes', async () => { + const { grouper, waitForProcessing } = setup(); - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + // Buffer first decision for scheduleId: '2' + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('2'), + }); - // Get groups from onChange callback - const lastCall = onChange.mock.calls[onChange.mock.calls.length - 1]; - const groupsFromCallback = lastCall[0].currentGroups; + // Update with different decision for scheduleId: '10' (old one should be removed from buffer) + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('10'), + }); - // Try to add a new group to the callback's groups object - groupsFromCallback['999'] = groupsFromCallback['7']; + // No groups should exist yet (still buffered) + expect(Object.keys(grouper.getGroups()).length).toBe(0); + + // Now add scheduled events for both decisions + grouper.updateEvents([ + createScheduleDecisionEvent('2'), // scheduleId: '2' + createScheduleDecisionEvent('10'), // scheduleId: '10' + ]); + await waitForProcessing(); + + const groups = grouper.getGroups(); + + // Group '2' should only have scheduled event (pending was cleared from buffer) + expect(groups['2']).toBeDefined(); + expect(groups['2'].events).toHaveLength(1); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(false); + + // Group '10' 
should have both scheduled and pending events (current pending in buffer) + expect(groups['10']).toBeDefined(); + expect(groups['10'].events).toHaveLength(2); + expect( + groups['10'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(true); + }); + + it('should filter out pending decision when decision group has more than 2 events', async () => { + const { grouper, waitForProcessing } = setup(); + + // Add scheduled event and pending decision + grouper.updateEvents([createScheduleDecisionEvent('2')]); + await waitForProcessing(); - // Internal groups should not have the new group (shallow copy protects object structure) - const internalGroups = grouper.getGroups(); - expect(internalGroups['999']).toBeUndefined(); + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('2'), }); - it('should allow modification of group properties (shallow copy limitation)', async () => { - const { grouper, onChange } = createGrouper(); + // Group should have 2 events (scheduled + pending) + let groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(2); + + // Now add started event (makes it 3 events total) + grouper.updateEvents([ + createScheduleDecisionEvent('2'), + startDecisionTaskEvent, + ]); + await waitForProcessing(); + + // Pending decision should be filtered out when there are more than 2 events + groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(2); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(false); + + // even if pending event is updated again, it should not be added to the group + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('2'), + }); - grouper.updateEvents(completedActivityTaskEvents as HistoryEvent[]); - await waitForProcessing(onChange); + groups = grouper.getGroups(); + 
expect(groups['2'].events).toHaveLength(2); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(false); + }); - // Get groups from onChange callback - const lastCall = onChange.mock.calls[onChange.mock.calls.length - 1]; - const groupsFromCallback = lastCall[0].currentGroups; - const originalLabel = groupsFromCallback['7'].label; + it('should keep pending decision when decision group has exactly 2 events', async () => { + const { grouper, waitForProcessing } = setup(); - // Modify a group's property - this WILL affect internal state (shallow copy limitation) - groupsFromCallback['7'].label = 'Modified Label'; + // Add scheduled event and pending decision + grouper.updateEvents([createScheduleDecisionEvent('2')]); + await waitForProcessing(); - // Internal groups ARE modified since group objects are shared references - const internalGroups = grouper.getGroups(); - expect(internalGroups['7'].label).toBe('Modified Label'); - expect(internalGroups['7'].label).not.toBe(originalLabel); + await grouper.updatePendingEvents({ + pendingStartActivities: [], + pendingStartDecision: createPendingDecision('2'), }); + + // Group should have 2 events (scheduled + pending) + const groups = grouper.getGroups(); + expect(groups['2'].events).toHaveLength(2); + expect( + groups['2'].events.some( + (e) => e.attributes === 'pendingDecisionTaskStartEventAttributes' + ) + ).toBe(true); }); }); + +function setup(options: Partial = {}) { + // Queue of promise resolvers/rejecters waiting for processing to complete + const pendingResolvers: Array<{ + resolve: () => void; + reject: (error: Error) => void; + timeoutId: NodeJS.Timeout; + }> = []; + + // Create onChange mock that resolves pending promises when processing completes + const onChange: jest.MockedFunction = jest.fn( + (state) => { + if (state.status === 'idle') { + // Resolve all pending promises at once + pendingResolvers.forEach(({ timeoutId, resolve }) => { + 
clearTimeout(timeoutId); + resolve(); + }); + pendingResolvers.length = 0; + } + } + ); + + // Create grouper with onChange and any additional options + const grouper = new WorkflowHistoryGrouper({ + onChange, + ...options, + }); + + // Helper function to wait for next processing cycle + const waitForProcessing = async (timeout = 1000): Promise => { + await new Promise((resolve, reject) => { + const timeoutId = setTimeout(() => { + // Remove this resolver from queue if it times out + const index = pendingResolvers.findIndex( + (r) => r.timeoutId === timeoutId + ); + if (index !== -1) { + pendingResolvers.splice(index, 1); + } + reject(new Error('Timeout waiting for processing to complete')); + }, timeout); + + pendingResolvers.push({ resolve, reject, timeoutId }); + }); + }; + + // Cleanup function to clear any pending timeouts + const cleanup = () => { + pendingResolvers.forEach(({ timeoutId }) => clearTimeout(timeoutId)); + pendingResolvers.length = 0; + }; + + // Register cleanup automatically + allCleanups.push(cleanup); + + return { grouper, onChange, waitForProcessing }; +} diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.ts b/src/views/workflow-history/helpers/workflow-history-grouper.ts index 51cb2638b..4e542efcb 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.ts @@ -161,11 +161,8 @@ export default class WorkflowHistoryGrouper { * Schedules the next batch using the best available API. * Uses Scheduler API if available, otherwise falls back to Promise microtask. 
*/ - private scheduleNextBatch(): void { - // Check if Scheduler API is available - const useScheduler = true; + private scheduleNextBatch() { if ( - useScheduler && typeof window !== 'undefined' && 'scheduler' in window && 'postTask' in (window.scheduler as any) @@ -187,7 +184,7 @@ export default class WorkflowHistoryGrouper { * Processes a single batch of events (or all remaining events if no batchSize). * This method handles the core grouping logic and schedules itself for the next batch. */ - private async processBatch(): Promise { + private processBatch(): void { // Check if there are events to process if (this.lastProcessedEventIndex >= this.allEvents.length - 1) { this.isProcessing = false; diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts index 1674624dd..8c075cd3d 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts @@ -19,7 +19,7 @@ export type ProcessEventsResult = { /** * Processing status for incremental grouping operations. */ -export type ProcessingStatus = 'idle' | 'processing' | 'error'; +export type ProcessingStatus = 'idle' | 'processing'; /** * State snapshot of the grouping process. 
From af7836654ffb02806a03cc1400689db0b26c37f1 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 6 Nov 2025 00:46:49 +0100 Subject: [PATCH 09/58] change the api of onChange and add destroy method Signed-off-by: Assem Hafez --- .../workflow-history-grouper.test.tsx | 47 ++++++++++++++----- .../helpers/workflow-history-grouper.ts | 33 ++++++++++--- .../helpers/workflow-history-grouper.types.ts | 7 --- 3 files changed, 62 insertions(+), 25 deletions(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx index 231644afb..4eb20f616 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx @@ -587,6 +587,31 @@ describe(WorkflowHistoryGrouper.name, () => { ) ).toBe(true); }); + + it('should clean up all resources when destroy is called', async () => { + const { grouper, handleStateChange, waitForProcessing } = setup(); + + // Process some events and verify onChange is called + grouper.updateEvents(completedActivityTaskEvents); + await waitForProcessing(); + + expect(handleStateChange).toHaveBeenCalled(); + expect(Object.keys(grouper.getGroups()).length).toBeGreaterThan(0); + + handleStateChange.mockClear(); + // Destroy the grouper + grouper.destroy(); + + // Verify state is reset + expect(grouper.getGroups()).toEqual({}); + expect(grouper.getLastProcessedEventIndex()).toBe(-1); + + // Process new events - onChange should NOT be called anymore + grouper.updateEvents(completedActivityTaskEvents); + + // Verify onChange was NOT called after destroy + expect(handleStateChange).not.toHaveBeenCalled(); + }); }); function setup(options: Partial = {}) { @@ -597,9 +622,9 @@ function setup(options: Partial = {}) { timeoutId: NodeJS.Timeout; }> = []; - // Create onChange mock that resolves pending promises when processing completes - const 
onChange: jest.MockedFunction = jest.fn( - (state) => { + // Create state change handler that resolves pending promises when processing completes + const handleStateChange: jest.MockedFunction = + jest.fn((state) => { if (state.status === 'idle') { // Resolve all pending promises at once pendingResolvers.forEach(({ timeoutId, resolve }) => { @@ -608,14 +633,11 @@ function setup(options: Partial = {}) { }); pendingResolvers.length = 0; } - } - ); + }); - // Create grouper with onChange and any additional options - const grouper = new WorkflowHistoryGrouper({ - onChange, - ...options, - }); + // Create grouper and subscribe to state changes + const grouper = new WorkflowHistoryGrouper(options); + grouper.onChange(handleStateChange); // Helper function to wait for next processing cycle const waitForProcessing = async (timeout = 1000): Promise => { @@ -635,14 +657,15 @@ function setup(options: Partial = {}) { }); }; - // Cleanup function to clear any pending timeouts + // Cleanup function to clear any pending timeouts and unsubscribe const cleanup = () => { pendingResolvers.forEach(({ timeoutId }) => clearTimeout(timeoutId)); pendingResolvers.length = 0; + grouper.destroy(); }; // Register cleanup automatically allCleanups.push(cleanup); - return { grouper, onChange, waitForProcessing }; + return { grouper, handleStateChange, waitForProcessing }; } diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.ts b/src/views/workflow-history/helpers/workflow-history-grouper.ts index 4e542efcb..3dea5c3d6 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.ts @@ -46,7 +46,7 @@ export default class WorkflowHistoryGrouper { private groups: HistoryEventsGroups = {}; private currentPendingActivities: PendingActivityTaskStartEvent[] = []; private currentPendingDecision: PendingDecisionTaskStartEvent | null = null; - private onChange: (state: GroupingProcessState) => void; + 
private subscribers: Set<(state: GroupingProcessState) => void> = new Set(); private batchSize?: number; private isProcessing: boolean = false; @@ -54,11 +54,21 @@ export default class WorkflowHistoryGrouper { private bufferedPendingActivities: PendingActivityTaskStartEvent[] = []; private bufferedPendingDecision: PendingDecisionTaskStartEvent | null = null; - constructor({ onChange, batchSize }: Props) { - this.onChange = onChange; + constructor({ batchSize }: Props = {}) { this.batchSize = batchSize; } + /** + * Subscribe to state changes. + * Returns an unsubscribe function. + */ + public onChange(callback: (state: GroupingProcessState) => void): () => void { + this.subscribers.add(callback); + return () => { + this.subscribers.delete(callback); + }; + } + /** * Updates the events list and automatically starts processing. * The processor will continue batch by batch until all events are processed. @@ -118,6 +128,16 @@ export default class WorkflowHistoryGrouper { this.isProcessing = false; } + /** + * Destroys the grouper, cleaning up all resources. + * Clears all subscribers and resets internal state. + * Call this when the grouper is no longer needed. + */ + public destroy(): void { + this.subscribers.clear(); + this.reset(); + } + /** * Gets the current groups without processing new events. * @@ -212,13 +232,14 @@ export default class WorkflowHistoryGrouper { const processedEventsCount = this.lastProcessedEventIndex + 1; const remainingEventsCount = this.allEvents.length - processedEventsCount; - // Report progress - this.onChange({ + // Report progress to all subscribers + const state: GroupingProcessState = { currentGroups: { ...this.groups }, processedEventsCount, remainingEventsCount, status: remainingEventsCount > 0 ? 
'processing' : 'idle', - }); + }; + this.subscribers.forEach((callback) => callback(state)); // Check if there are more events to process if (this.lastProcessedEventIndex < this.allEvents.length - 1) { diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts index 8c075cd3d..b4ba1492a 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts @@ -41,13 +41,6 @@ export type GroupingProcessState = { export type GroupingStateChangeCallback = (state: GroupingProcessState) => void; export type Props = { - /** - * Callback invoked when grouping state changes. - * Provides real-time updates on processing progress. - * Required to receive state updates. - */ - onChange: GroupingStateChangeCallback; - /** * Batch size for incremental processing. * If specified, events will be processed in batches to allow progress updates. 
From 0539afee8554a6c5dd7e7b9382339c99653fe885 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 6 Nov 2025 01:47:12 +0100 Subject: [PATCH 10/58] replace getGroups with getState Signed-off-by: Assem Hafez --- .../workflow-history-grouper.test.tsx | 108 ++++++++++++------ .../helpers/workflow-history-grouper.ts | 46 ++++---- .../helpers/workflow-history-grouper.types.ts | 2 +- 3 files changed, 97 insertions(+), 59 deletions(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx index 4eb20f616..4a226b65f 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx @@ -37,7 +37,7 @@ describe(WorkflowHistoryGrouper.name, () => { grouper.updateEvents(completedActivityTaskEvents); await waitForProcessing(); - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; expect(groups).toBeDefined(); expect(groups['7']).toBeDefined(); expect(groups['7'].groupType).toBe('Activity'); @@ -77,7 +77,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: null, }); - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; const activityGroup = groups['7']; expect(activityGroup.events).toHaveLength(2); expect(activityGroup.events[1].attributes).toBe( @@ -98,7 +98,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: createPendingDecision('2'), }); - const decisionGroup = grouper.getGroups()['2']; + const decisionGroup = grouper.getState().groups['2']; expect(decisionGroup.groupType).toBe('Decision'); expect(decisionGroup.events).toHaveLength(2); }); @@ -115,7 +115,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: null, }); - const firstGroups = grouper.getGroups(); + const firstGroups = grouper.getState().groups; const firstActivityGroup = 
firstGroups['7']; expect(firstActivityGroup.events).toHaveLength(2); @@ -125,7 +125,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: null, }); - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; const activityGroup = groups['7']; expect(activityGroup.events).toHaveLength(1); expect(activityGroup.events[0].attributes).toBe( @@ -145,7 +145,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: createPendingDecision('2'), }); - const firstGroups = grouper.getGroups(); + const firstGroups = grouper.getState().groups; expect(firstGroups['2'].events).toHaveLength(2); // Second call without pending decision (it completed) @@ -154,7 +154,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: null, }); - const decisionGroup = grouper.getGroups()['2']; + const decisionGroup = grouper.getState().groups['2']; expect(decisionGroup.events).toHaveLength(1); }); @@ -183,7 +183,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: null, }); - const activityGroup = grouper.getGroups()['7']; + const activityGroup = grouper.getState().groups['7']; expect(activityGroup.events).toHaveLength(2); expect( activityGroup.events.some( @@ -195,7 +195,7 @@ describe(WorkflowHistoryGrouper.name, () => { it('should return current groups without processing', () => { const { grouper } = setup(); - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; expect(groups).toEqual({}); }); @@ -208,13 +208,13 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); expect(grouper.getLastProcessedEventIndex()).toBe(2); - expect(Object.keys(grouper.getGroups()).length).toBeGreaterThan(0); + expect(Object.keys(grouper.getState().groups).length).toBeGreaterThan(0); // Reset grouper.reset(); expect(grouper.getLastProcessedEventIndex()).toBe(-1); - expect(grouper.getGroups()).toEqual({}); + expect(grouper.getState().groups).toEqual({}); }); it('should reprocess 
events after reset', async () => { @@ -224,16 +224,16 @@ describe(WorkflowHistoryGrouper.name, () => { grouper.updateEvents(completedActivityTaskEvents); await waitForProcessing(); - const firstGroups = grouper.getGroups(); + const firstGroups = grouper.getState().groups; // Reset and reprocess grouper.reset(); - expect(grouper.getGroups()).toEqual({}); + expect(grouper.getState().groups).toEqual({}); grouper.updateEvents(completedActivityTaskEvents); await waitForProcessing(); - expect(grouper.getGroups()).toEqual(firstGroups); + expect(grouper.getState().groups).toEqual(firstGroups); }); it('should buffer pending activity when group does not exist yet', async () => { @@ -246,7 +246,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // Group should NOT exist yet (pending event is buffered) - let groups = grouper.getGroups(); + let groups = grouper.getState().groups; expect(groups['7']).toBeUndefined(); // Now add the scheduled event @@ -254,7 +254,7 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); // Group should now exist with both scheduled and pending events - groups = grouper.getGroups(); + groups = grouper.getState().groups; const activityGroup = groups['7']; expect(activityGroup).toBeDefined(); expect(activityGroup.events).toHaveLength(2); @@ -276,7 +276,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // Group should NOT exist yet (pending event is buffered) - let groups = grouper.getGroups(); + let groups = grouper.getState().groups; expect(groups['2']).toBeUndefined(); // Now add the scheduled event @@ -284,7 +284,7 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); // Group should now exist with both scheduled and pending events - groups = grouper.getGroups(); + groups = grouper.getState().groups; const decisionGroup = groups['2']; expect(decisionGroup).toBeDefined(); expect(decisionGroup.groupType).toBe('Decision'); @@ -304,14 +304,14 @@ describe(WorkflowHistoryGrouper.name, () => { }); // No 
groups should exist yet - expect(Object.keys(grouper.getGroups()).length).toBe(0); + expect(Object.keys(grouper.getState().groups).length).toBe(0); // Add first scheduled event grouper.updateEvents([createScheduleActivityEvent('7')]); await waitForProcessing(); // First group should now exist - let groups = grouper.getGroups(); + let groups = grouper.getState().groups; expect(groups['7']).toBeDefined(); expect(groups['10']).toBeUndefined(); @@ -323,7 +323,7 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); // Both groups should now exist - groups = grouper.getGroups(); + groups = grouper.getState().groups; expect(groups['7']).toBeDefined(); expect(groups['10']).toBeDefined(); expect(groups['7'].events).toHaveLength(2); @@ -348,7 +348,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // No groups should exist yet (still buffered) - expect(Object.keys(grouper.getGroups()).length).toBe(0); + expect(Object.keys(grouper.getState().groups).length).toBe(0); // Now add scheduled events for both activities grouper.updateEvents([ @@ -357,7 +357,7 @@ describe(WorkflowHistoryGrouper.name, () => { ]); await waitForProcessing(); - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; // Group '7' should only have scheduled event (pending was cleared from buffer) expect(groups['7']).toBeDefined(); @@ -395,7 +395,7 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); // Group should only have scheduled event (buffered pending was cleared) - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; const activityGroup = groups['7']; expect(activityGroup.events).toHaveLength(1); expect(activityGroup.events[0].attributes).toBe( @@ -413,7 +413,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // No group yet - expect(grouper.getGroups()['7']).toBeUndefined(); + expect(grouper.getState().groups['7']).toBeUndefined(); // Process scheduled event 
grouper.updateEvents([createScheduleActivityEvent('7')]); @@ -427,7 +427,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // Group should now have both events - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; const activityGroup = groups['7']; expect(activityGroup.events).toHaveLength(2); }); @@ -446,7 +446,7 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); // Should have complete group with both events - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; const activityGroup = groups['7']; expect(activityGroup).toBeDefined(); expect(activityGroup.events).toHaveLength(2); @@ -468,7 +468,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // Group should NOT exist in the UI - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; expect(groups['7']).toBeUndefined(); expect(Object.keys(groups).length).toBe(0); }); @@ -489,7 +489,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // No groups should exist yet (still buffered) - expect(Object.keys(grouper.getGroups()).length).toBe(0); + expect(Object.keys(grouper.getState().groups).length).toBe(0); // Now add scheduled events for both decisions grouper.updateEvents([ @@ -498,7 +498,7 @@ describe(WorkflowHistoryGrouper.name, () => { ]); await waitForProcessing(); - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; // Group '2' should only have scheduled event (pending was cleared from buffer) expect(groups['2']).toBeDefined(); @@ -532,7 +532,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // Group should have 2 events (scheduled + pending) - let groups = grouper.getGroups(); + let groups = grouper.getState().groups; expect(groups['2'].events).toHaveLength(2); // Now add started event (makes it 3 events total) @@ -543,7 +543,7 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); // Pending decision should be filtered out when there are 
more than 2 events - groups = grouper.getGroups(); + groups = grouper.getState().groups; expect(groups['2'].events).toHaveLength(2); expect( groups['2'].events.some( @@ -557,7 +557,7 @@ describe(WorkflowHistoryGrouper.name, () => { pendingStartDecision: createPendingDecision('2'), }); - groups = grouper.getGroups(); + groups = grouper.getState().groups; expect(groups['2'].events).toHaveLength(2); expect( groups['2'].events.some( @@ -579,7 +579,7 @@ describe(WorkflowHistoryGrouper.name, () => { }); // Group should have 2 events (scheduled + pending) - const groups = grouper.getGroups(); + const groups = grouper.getState().groups; expect(groups['2'].events).toHaveLength(2); expect( groups['2'].events.some( @@ -596,14 +596,14 @@ describe(WorkflowHistoryGrouper.name, () => { await waitForProcessing(); expect(handleStateChange).toHaveBeenCalled(); - expect(Object.keys(grouper.getGroups()).length).toBeGreaterThan(0); + expect(Object.keys(grouper.getState().groups).length).toBeGreaterThan(0); handleStateChange.mockClear(); // Destroy the grouper grouper.destroy(); // Verify state is reset - expect(grouper.getGroups()).toEqual({}); + expect(grouper.getState().groups).toEqual({}); expect(grouper.getLastProcessedEventIndex()).toBe(-1); // Process new events - onChange should NOT be called anymore @@ -612,6 +612,42 @@ describe(WorkflowHistoryGrouper.name, () => { // Verify onChange was NOT called after destroy expect(handleStateChange).not.toHaveBeenCalled(); }); + + it('should return current state via getState', async () => { + const { grouper, waitForProcessing } = setup(); + + // Initial state - no events processed + let state = grouper.getState(); + expect(state).toEqual({ + groups: {}, + processedEventsCount: 0, + remainingEventsCount: 0, + status: 'idle', + }); + + // Add events but don't wait - status should be processing + grouper.updateEvents(completedActivityTaskEvents); + state = grouper.getState(); + expect(state.status).toBe('processing'); + 
expect(state.remainingEventsCount).toBeGreaterThan(0); + + // Wait for processing to complete + await waitForProcessing(); + + // After processing - status should be idle + state = grouper.getState(); + expect(state).toEqual({ + groups: expect.any(Object), + processedEventsCount: completedActivityTaskEvents.length, + remainingEventsCount: 0, + status: 'idle', + }); + expect(Object.keys(state.groups).length).toBeGreaterThan(0); + + // Verify getState() returns consistent data + const anotherState = grouper.getState(); + expect(anotherState.groups).toEqual(state.groups); + }); }); function setup(options: Partial = {}) { diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.ts b/src/views/workflow-history/helpers/workflow-history-grouper.ts index 3dea5c3d6..7425bc6fd 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.ts @@ -139,19 +139,24 @@ export default class WorkflowHistoryGrouper { } /** - * Gets the current groups without processing new events. - * - * @returns Current state of groups + * Gets the index of the last processed event. */ - public getGroups(): HistoryEventsGroups { - return this.groups; + public getLastProcessedEventIndex(): number { + return this.lastProcessedEventIndex; } /** - * Gets the index of the last processed event. + * Gets the current state of the grouper. + * Returns current groups, processing status, and event counts. */ - public getLastProcessedEventIndex(): number { - return this.lastProcessedEventIndex; + public getState(): GroupingProcessState { + return { + groups: { ...this.groups }, + processedEventsCount: this.lastProcessedEventIndex + 1, + remainingEventsCount: + this.allEvents.length - this.lastProcessedEventIndex - 1, + status: this.isProcessing ? 
'processing' : 'idle', + }; } // ============================================================================ @@ -228,25 +233,22 @@ export default class WorkflowHistoryGrouper { // Move pointer forward this.lastProcessedEventIndex = batchEnd - 1; - // Calculate progress - const processedEventsCount = this.lastProcessedEventIndex + 1; - const remainingEventsCount = this.allEvents.length - processedEventsCount; + // Check if there are more events to process + const hasMoreEvents = + this.lastProcessedEventIndex < this.allEvents.length - 1; + + // Update processing state before reporting to subscribers + if (!hasMoreEvents) { + this.isProcessing = false; + } // Report progress to all subscribers - const state: GroupingProcessState = { - currentGroups: { ...this.groups }, - processedEventsCount, - remainingEventsCount, - status: remainingEventsCount > 0 ? 'processing' : 'idle', - }; + const state = this.getState(); this.subscribers.forEach((callback) => callback(state)); - // Check if there are more events to process - if (this.lastProcessedEventIndex < this.allEvents.length - 1) { + // Schedule next batch if needed + if (hasMoreEvents) { this.scheduleNextBatch(); - } else { - // All done - this.isProcessing = false; } } diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts index b4ba1492a..45c4e44b9 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts @@ -26,7 +26,7 @@ export type ProcessingStatus = 'idle' | 'processing'; */ export type GroupingProcessState = { /** Current groups accumulated so far */ - currentGroups: HistoryEventsGroups; + groups: HistoryEventsGroups; /** Number of events that have been successfully processed since the grouper was created/reset */ processedEventsCount: number; /** Number of events that are still pending (not yet processed) */ From 
6ff8c4e1fe947a4dd6ea89536981fba58295da9c Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 6 Nov 2025 02:07:22 +0100 Subject: [PATCH 11/58] init grouper hook Signed-off-by: Assem Hafez --- .../use-workflow-history-grouper.test.tsx | 413 ++++++++++++++++++ .../hooks/use-workflow-history-grouper.ts | 76 ++++ 2 files changed, 489 insertions(+) create mode 100644 src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx create mode 100644 src/views/workflow-history/hooks/use-workflow-history-grouper.ts diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx new file mode 100644 index 000000000..6a2ace345 --- /dev/null +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx @@ -0,0 +1,413 @@ +import { renderHook, act } from '@testing-library/react'; + +import type { HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; + +import { + pendingActivityTaskStartEvent, + pendingDecisionTaskStartEvent, +} from '../../__fixtures__/workflow-history-pending-events'; +import HistoryEventsGrouper from '../../helpers/workflow-history-grouper'; +import type { + GroupingProcessState, + ProcessEventsParams, +} from '../../helpers/workflow-history-grouper.types'; +import useWorkflowHistoryGrouper from '../use-workflow-history-grouper'; + +// Mock the HistoryEventsGrouper +jest.mock('../../helpers/workflow-history-grouper'); + +// Mock useThrottledState to disable throttling in tests +jest.mock('@/hooks/use-throttled-state', () => { + const { useState } = jest.requireActual('react'); + return jest.fn((initialValue) => { + const [state, setState] = useState(initialValue); + const setStateWrapper = ( + callback: (prev: any) => any, + _executeImmediately?: boolean + ) => { + setState((prev: any) => callback(prev)); + }; + return [state, setStateWrapper]; + }); +}); + 
+describe(useWorkflowHistoryGrouper.name, () => { + let mockGrouper: jest.Mocked; + let mockOnChangeCallback: (state: GroupingProcessState) => void; + + const createMockState = ( + overrides?: Partial + ): GroupingProcessState => ({ + groups: {}, + processedEventsCount: 0, + remainingEventsCount: 0, + status: 'idle', + ...overrides, + }); + + beforeEach(() => { + // Reset the mock implementation before each test + mockOnChangeCallback = jest.fn(); + + mockGrouper = { + getState: jest.fn(), + onChange: jest.fn(), + updateEvents: jest.fn(), + updatePendingEvents: jest.fn(), + destroy: jest.fn(), + } as any; + + // Mock the constructor to return our mock grouper + ( + HistoryEventsGrouper as jest.MockedClass + ).mockImplementation(() => mockGrouper); + + // Default mock implementations + mockGrouper.getState.mockReturnValue(createMockState()); + mockGrouper.onChange.mockImplementation((callback) => { + mockOnChangeCallback = callback; + return jest.fn(); // Return unsubscribe function + }); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + + describe('initialization', () => { + it('should create HistoryEventsGrouper with default batchSize', () => { + renderHook(() => useWorkflowHistoryGrouper()); + + expect(HistoryEventsGrouper).toHaveBeenCalledWith({ + batchSize: 300, + }); + }); + + it('should initialize with state from grouper.getState()', () => { + const mockState = createMockState({ + groups: { group1: { groupType: 'Activity' } as any }, + processedEventsCount: 10, + }); + mockGrouper.getState.mockReturnValue(mockState); + + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(mockGrouper.getState).toHaveBeenCalled(); + expect(result.current.eventGroups).toEqual(mockState.groups); + expect(result.current.groupingState).toEqual(mockState); + }); + + it('should subscribe to grouper onChange', () => { + renderHook(() => useWorkflowHistoryGrouper()); + + expect(mockGrouper.onChange).toHaveBeenCalledWith(expect.any(Function)); + }); + 
+ it('should return empty groups when groupingState is null', () => { + mockGrouper.getState.mockReturnValue(null as any); + + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(result.current.eventGroups).toEqual({}); + }); + }); + + describe('custom throttleMs', () => { + it('should accept custom throttle time', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper(5000)); + + expect(result.current).toBeDefined(); + }); + }); + + describe('onChange subscription', () => { + it('should update groupingState when onChange callback is triggered', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const newState = createMockState({ + groups: { group1: { groupType: 'Decision' } as any }, + processedEventsCount: 5, + status: 'processing', + }); + + act(() => { + mockOnChangeCallback(newState); + }); + + expect(result.current.groupingState).toEqual(newState); + expect(result.current.eventGroups).toEqual(newState.groups); + expect(result.current.isProcessing).toBe(true); + }); + + it('should set isProcessing to false when status is idle', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const idleState = createMockState({ + status: 'idle', + }); + + act(() => { + mockOnChangeCallback(idleState); + }); + + expect(result.current.isProcessing).toBe(false); + }); + + it('should set isProcessing to true when status is processing', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const processingState = createMockState({ + status: 'processing', + }); + + act(() => { + mockOnChangeCallback(processingState); + }); + + expect(result.current.isProcessing).toBe(true); + }); + + it('should update state immediately when onChange is called', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const newState = createMockState({ + processedEventsCount: 50, + groups: { group1: { groupType: 'Activity' } as any }, + }); + + act(() 
=> { + mockOnChangeCallback(newState); + }); + + expect(result.current.groupingState).toEqual(newState); + }); + }); + + describe('updateEvents', () => { + it('should call grouper.updateEvents with provided events', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const mockEvents: HistoryEvent[] = [ + { eventId: '1', eventTime: null } as HistoryEvent, + { eventId: '2', eventTime: null } as HistoryEvent, + ]; + + act(() => { + result.current.updateEvents(mockEvents); + }); + + expect(mockGrouper.updateEvents).toHaveBeenCalledWith(mockEvents); + }); + + it('should handle empty events array', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + act(() => { + result.current.updateEvents([]); + }); + + expect(mockGrouper.updateEvents).toHaveBeenCalledWith([]); + }); + + it('should not throw if grouper is not initialized', () => { + // This shouldn't happen in practice, but test defensive coding + mockGrouper.updateEvents.mockImplementation(() => { + throw new Error('Grouper not initialized'); + }); + + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(() => { + act(() => { + result.current.updateEvents([]); + }); + }).toThrow(); + }); + }); + + describe('updatePendingEvents', () => { + it('should call grouper.updatePendingEvents with provided params', async () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const params: ProcessEventsParams = { + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: pendingDecisionTaskStartEvent, + }; + + await act(async () => { + await result.current.updatePendingEvents(params); + }); + + expect(mockGrouper.updatePendingEvents).toHaveBeenCalledWith(params); + }); + + it('should handle empty pending events', async () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const params: ProcessEventsParams = { + pendingStartActivities: [], + pendingStartDecision: null, + }; + + 
await act(async () => { + await result.current.updatePendingEvents(params); + }); + + expect(mockGrouper.updatePendingEvents).toHaveBeenCalledWith(params); + }); + + it('should be async and await completion', async () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + let updateCompleted = false; + mockGrouper.updatePendingEvents.mockImplementation(async () => { + updateCompleted = true; + }); + + const params: ProcessEventsParams = { + pendingStartActivities: [], + pendingStartDecision: null, + }; + + await act(async () => { + await result.current.updatePendingEvents(params); + }); + + expect(updateCompleted).toBe(true); + }); + }); + + describe('cleanup', () => { + it('should unsubscribe from onChange on unmount', () => { + const mockUnsubscribe = jest.fn(); + mockGrouper.onChange.mockReturnValue(mockUnsubscribe); + + const { unmount } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(mockUnsubscribe).not.toHaveBeenCalled(); + + unmount(); + + expect(mockUnsubscribe).toHaveBeenCalled(); + }); + + it('should call grouper.destroy on unmount', () => { + const { unmount } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(mockGrouper.destroy).not.toHaveBeenCalled(); + + unmount(); + + expect(mockGrouper.destroy).toHaveBeenCalled(); + }); + + it('should handle multiple unmounts safely', () => { + const { unmount } = renderHook(() => useWorkflowHistoryGrouper()); + + unmount(); + + expect(mockGrouper.destroy).toHaveBeenCalledTimes(1); + + // Second unmount should not throw + expect(() => unmount()).not.toThrow(); + }); + }); + + describe('return values', () => { + it('should return correct shape of object', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(result.current).toEqual({ + eventGroups: expect.any(Object), + isProcessing: expect.any(Boolean), + groupingState: expect.any(Object), + updateEvents: expect.any(Function), + updatePendingEvents: expect.any(Function), + }); + }); + + 
it('should maintain stable function references', () => { + const { result, rerender } = renderHook(() => + useWorkflowHistoryGrouper() + ); + + const firstUpdateEvents = result.current.updateEvents; + const firstUpdatePendingEvents = result.current.updatePendingEvents; + + rerender(); + + expect(result.current.updateEvents).toBe(firstUpdateEvents); + expect(result.current.updatePendingEvents).toBe(firstUpdatePendingEvents); + }); + }); + + describe('integration scenarios', () => { + it('should handle rapid event updates', () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const events1: HistoryEvent[] = [{ eventId: '1' } as HistoryEvent]; + const events2: HistoryEvent[] = [ + { eventId: '1' } as HistoryEvent, + { eventId: '2' } as HistoryEvent, + ]; + const events3: HistoryEvent[] = [ + { eventId: '1' } as HistoryEvent, + { eventId: '2' } as HistoryEvent, + { eventId: '3' } as HistoryEvent, + ]; + + act(() => { + result.current.updateEvents(events1); + result.current.updateEvents(events2); + result.current.updateEvents(events3); + }); + + expect(mockGrouper.updateEvents).toHaveBeenCalledTimes(3); + expect(mockGrouper.updateEvents).toHaveBeenLastCalledWith(events3); + }); + + it('should handle combined updates and state changes', async () => { + const { result } = renderHook(() => useWorkflowHistoryGrouper()); + + const mockEvents: HistoryEvent[] = [{ eventId: '1' } as HistoryEvent]; + const params: ProcessEventsParams = { + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: null, + }; + + act(() => { + result.current.updateEvents(mockEvents); + }); + + await act(async () => { + await result.current.updatePendingEvents(params); + }); + + const newState = createMockState({ + groups: { group1: { groupType: 'Activity' } as any }, + processedEventsCount: 1, + }); + + act(() => { + mockOnChangeCallback(newState); + }); + + expect(result.current.eventGroups).toEqual(newState.groups); + 
expect(mockGrouper.updateEvents).toHaveBeenCalledWith(mockEvents); + expect(mockGrouper.updatePendingEvents).toHaveBeenCalledWith(params); + }); + + it('should persist grouper instance across re-renders', () => { + const { rerender } = renderHook(() => useWorkflowHistoryGrouper()); + + expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); + + rerender(); + rerender(); + rerender(); + + // Constructor should only be called once + expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); + }); + }); +}); diff --git a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts new file mode 100644 index 000000000..a3ed136ef --- /dev/null +++ b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts @@ -0,0 +1,76 @@ +import { useCallback, useEffect, useRef } from 'react'; + +import type { HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; +import useThrottledState from '@/hooks/use-throttled-state'; + +import HistoryEventsGrouper from '../helpers/workflow-history-grouper'; +import type { + GroupingProcessState, + ProcessEventsParams, +} from '../helpers/workflow-history-grouper.types'; + +/** + * Hook for grouping workflow history events using the HistoryEventsGrouper. 
+ */ +export default function useWorkflowHistoryGrouper(throttleMs = 2000) { + // Initialize the grouper once and persist across renders + const grouperRef = useRef(null); + + if (!grouperRef.current) { + grouperRef.current = new HistoryEventsGrouper({ + batchSize: 300, + }); + } + + // Track grouping state - updated internally during processing + const [groupingState, setGroupingState] = + useThrottledState( + grouperRef.current.getState(), + throttleMs + ); + + useEffect(() => { + if (!grouperRef.current) return; + + const unsubscribe = grouperRef.current.onChange((state) => { + const setImmediate = state.processedEventsCount < 300; + setGroupingState(() => state, setImmediate); + }); + + return () => unsubscribe(); + }, [setGroupingState]); + + useEffect(() => { + return () => { + grouperRef.current?.destroy(); + }; + }, []); + + // Expose updateEvents method (usually called automatically by effect) + const updateEvents = useCallback((newEvents: HistoryEvent[]) => { + if (!grouperRef.current) { + return; + } + + grouperRef.current.updateEvents(newEvents); + }, []); + + // Expose updatePendingEvents method + const updatePendingEvents = useCallback( + async (params: ProcessEventsParams) => { + if (!grouperRef.current) { + return; + } + grouperRef.current.updatePendingEvents(params); + }, + [] + ); + + return { + eventGroups: groupingState?.groups ?? 
{}, + isProcessing: groupingState?.status === 'processing', + groupingState, + updateEvents, + updatePendingEvents, + }; +} From 38351d802ceb47c7ee2e8f3c647053866fb795f0 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 6 Nov 2025 02:35:15 +0100 Subject: [PATCH 12/58] use grouper in history Signed-off-by: Assem Hafez --- .../helpers/workflow-history-fetcher.ts | 2 +- .../hooks/use-workflow-history-fetcher.ts | 16 ++++++++---- .../hooks/use-workflow-history-grouper.ts | 6 ++++- .../workflow-history/workflow-history.tsx | 25 +++++++++++-------- 4 files changed, 32 insertions(+), 17 deletions(-) diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index 4b97cf89d..13ffeda3b 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -100,7 +100,7 @@ export default class WorkflowHistoryFetcher { this.unsubscribe = null; } } - unmount(): void { + destroy(): void { this.stop(); this.observer.destroy(); } diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index cfb375f13..54f8e33f0 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -6,6 +6,7 @@ import { useQueryClient, } from '@tanstack/react-query'; +import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; import useThrottledState from '@/hooks/use-throttled-state'; import { type WorkflowHistoryQueryParams, @@ -19,6 +20,7 @@ import { type ShouldContinueCallback } from '../helpers/workflow-history-fetcher.types'; export default function useWorkflowHistoryFetcher( params: WorkflowHistoryQueryParams & RouteParams, + onEventsChange: (events: HistoryEvent[]) => void, throttleMs: number = 2000 ) { const queryClient = 
useQueryClient(); @@ -43,21 +45,25 @@ export default function useWorkflowHistoryFetcher( const unsubscribe = fetcherRef.current.onChange((state) => { const pagesCount = state.data?.pages?.length || 0; + onEventsChange( + state.data?.pages?.flatMap((page) => page.history?.events || []) || [] + ); // immediately set if there is the first page without throttling other wise throttle setHistoryQuery(() => state, pagesCount <= 1); }); - // Fetch first page - fetcherRef.current.start((state) => !state?.data?.pages?.length); - return () => { unsubscribe(); }; - }, [setHistoryQuery]); + }, [setHistoryQuery, onEventsChange]); useEffect(() => { + if (!fetcherRef.current) return; + + // Fetch first page + fetcherRef.current.start((state) => !state?.data?.pages?.length); return () => { - fetcherRef.current?.unmount(); + fetcherRef.current?.destroy(); }; }, []); diff --git a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts index a3ed136ef..f73105db1 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts @@ -26,7 +26,11 @@ export default function useWorkflowHistoryGrouper(throttleMs = 2000) { const [groupingState, setGroupingState] = useThrottledState( grouperRef.current.getState(), - throttleMs + throttleMs, + { + leading: true, + trailing: true, + } ); useEffect(() => { diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 301c227bd..9f87c6304 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -29,12 +29,12 @@ import WORKFLOW_HISTORY_PAGE_SIZE_CONFIG from './config/workflow-history-page-si import compareUngroupedEvents from './helpers/compare-ungrouped-events'; import getSortableEventId from './helpers/get-sortable-event-id'; import getVisibleGroupsHasMissingEvents from 
'./helpers/get-visible-groups-has-missing-events'; -import { groupHistoryEvents } from './helpers/group-history-events'; import pendingActivitiesInfoToEvents from './helpers/pending-activities-info-to-events'; import pendingDecisionInfoToEvent from './helpers/pending-decision-info-to-event'; import useEventExpansionToggle from './hooks/use-event-expansion-toggle'; import useInitialSelectedEvent from './hooks/use-initial-selected-event'; import useWorkflowHistoryFetcher from './hooks/use-workflow-history-fetcher'; +import useWorkflowHistoryGrouper from './hooks/use-workflow-history-grouper'; import WorkflowHistoryCompactEventCard from './workflow-history-compact-event-card/workflow-history-compact-event-card'; import { WorkflowHistoryContext } from './workflow-history-context-provider/workflow-history-context-provider'; import WorkflowHistoryHeader from './workflow-history-header/workflow-history-header'; @@ -59,6 +59,12 @@ export default function WorkflowHistory({ params }: Props) { waitForNewEvent: true, }; + const { + eventGroups, + updateEvents: updateGrouperEvents, + updatePendingEvents: updateGrouperPendingEvents, + } = useWorkflowHistoryGrouper(); + const { historyQuery, startLoadingHistory, @@ -73,6 +79,10 @@ export default function WorkflowHistory({ params }: Props) { pageSize: wfHistoryRequestArgs.pageSize, waitForNewEvent: wfHistoryRequestArgs.waitForNewEvent, }, + (events) => { + console.log('events', events); + updateGrouperEvents(events); + }, 2000 ); @@ -123,7 +133,7 @@ export default function WorkflowHistory({ params }: Props) { [result] ); - const pendingHistoryEvents = useMemo(() => { + useEffect(() => { const pendingStartActivities = pendingActivitiesInfoToEvents( wfExecutionDescription.pendingActivities ); @@ -131,16 +141,11 @@ export default function WorkflowHistory({ params }: Props) { ? 
pendingDecisionInfoToEvent(wfExecutionDescription.pendingDecision) : null; - return { + updateGrouperPendingEvents({ pendingStartActivities, pendingStartDecision, - }; - }, [wfExecutionDescription]); - - const eventGroups = useMemo( - () => groupHistoryEvents(events, pendingHistoryEvents), - [events, pendingHistoryEvents] - ); + }); + }, [wfExecutionDescription, updateGrouperPendingEvents]); const filteredEventGroupsEntries = useMemo( () => From 96c383f2161287bb5c65a25f8ca174e1ac031527 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Wed, 5 Nov 2025 13:57:50 +0100 Subject: [PATCH 13/58] update fetcher based on feedback Signed-off-by: Assem Hafez --- .../workflow-history-fetcher.test.tsx | 41 +++++---------- .../helpers/workflow-history-fetcher.ts | 52 +++++++++---------- .../helpers/workflow-history-fetcher.types.ts | 31 +++++++++-- 3 files changed, 63 insertions(+), 61 deletions(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx index 200a9ebd3..aa1f59ddc 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx @@ -9,9 +9,13 @@ import mswMockEndpoints from '@/test-utils/msw-mock-handlers/helper/msw-mock-end import workflowHistoryMultiPageFixture from '../../__fixtures__/workflow-history-multi-page-fixture'; import WorkflowHistoryFetcher from '../workflow-history-fetcher'; -describe(WorkflowHistoryFetcher.name, () => { - let queryClient: QueryClient; +const RETRY_DELAY = 3000; +const RETRY_COUNT = 3; + +let queryClient: QueryClient; +let hoistedFetcher: WorkflowHistoryFetcher; +describe(WorkflowHistoryFetcher.name, () => { beforeEach(() => { queryClient = new QueryClient({ defaultOptions: { @@ -26,6 +30,7 @@ describe(WorkflowHistoryFetcher.name, () => { afterEach(() => { queryClient.clear(); + hoistedFetcher?.unmount(); }); 
it('should return the current query state from getCurrentState', async () => { @@ -34,8 +39,6 @@ describe(WorkflowHistoryFetcher.name, () => { const initialState = fetcher.getCurrentState(); expect(initialState.data).toBeUndefined(); expect(initialState.status).toBe('pending'); - - fetcher.unmount(); }); it('should call onChange callback on state changes', async () => { @@ -50,7 +53,6 @@ describe(WorkflowHistoryFetcher.name, () => { await waitFor(() => { expect(callback.mock.calls.length).toBeGreaterThan(initialCallCount); }); - fetcher.unmount(); }); it('should return unsubscribe function', async () => { @@ -61,6 +63,7 @@ describe(WorkflowHistoryFetcher.name, () => { const unsubscribe1 = fetcher.onChange(callback1); fetcher.onChange(callback2); + // Fetch the first page fetcher.start((state) => !state?.data?.pages?.length); await waitFor(() => { @@ -78,11 +81,9 @@ describe(WorkflowHistoryFetcher.name, () => { countBeforeUnsubscribe ); }); - - fetcher.unmount(); }); - it('should respect shouldContinue callback', async () => { + it('should not fetch any pages if shouldContinue callback returns false', async () => { const { fetcher } = setup(queryClient); const shouldContinue = jest.fn(() => false); @@ -95,8 +96,6 @@ describe(WorkflowHistoryFetcher.name, () => { const state = fetcher.getCurrentState(); expect(state.data?.pages || []).toHaveLength(0); - - fetcher.unmount(); }); it('should stop after shouldContinue returns false', async () => { @@ -112,8 +111,6 @@ describe(WorkflowHistoryFetcher.name, () => { expect(state.isFetching).toBe(false); expect(state.data?.pages).toHaveLength(2); }); - - fetcher.unmount(); }); it('should load all pages and auto-stop when there are no more pages', async () => { @@ -126,8 +123,6 @@ describe(WorkflowHistoryFetcher.name, () => { expect(state.hasNextPage).toBe(false); expect(state.data?.pages).toHaveLength(3); }); - - fetcher.unmount(); }); it('should auto-stop on error after initial success', async () => { @@ -144,8 +139,8 @@ 
describe(WorkflowHistoryFetcher.name, () => { expect(state.data?.pages).toHaveLength(1); }); - // Fast-forward through retry delays (3 retries * 3000ms each) - await jest.advanceTimersByTimeAsync(3 * 3000); + // Fast-forward through retry delays + await jest.advanceTimersByTimeAsync(RETRY_COUNT * RETRY_DELAY); await waitFor(() => { const state = fetcher.getCurrentState(); @@ -153,8 +148,6 @@ describe(WorkflowHistoryFetcher.name, () => { expect(state.isError).toBe(true); expect(state.data?.pages).toHaveLength(1); }); - - fetcher.unmount(); } finally { jest.useRealTimers(); } @@ -178,8 +171,6 @@ describe(WorkflowHistoryFetcher.name, () => { expect(state.isFetching).toBe(false); expect(state.data?.pages).toHaveLength(1); }); - - fetcher.unmount(); }); it('should allow start again after stop', async () => { @@ -210,7 +201,6 @@ describe(WorkflowHistoryFetcher.name, () => { const finalState = fetcher.getCurrentState(); expect(finalState.data?.pages).toHaveLength(3); - fetcher.unmount(); }); it('should fetch next page when available', async () => { @@ -230,8 +220,6 @@ describe(WorkflowHistoryFetcher.name, () => { const state = fetcher.getCurrentState(); expect(state.data?.pages).toHaveLength(2); }); - - fetcher.unmount(); }); it('should not fetch when already fetching', async () => { @@ -256,8 +244,6 @@ describe(WorkflowHistoryFetcher.name, () => { const state = fetcher.getCurrentState(); expect(state.data?.pages).toHaveLength(2); - - fetcher.unmount(); }); it('should not fetch when no next page available', async () => { @@ -275,7 +261,6 @@ describe(WorkflowHistoryFetcher.name, () => { const state = fetcher.getCurrentState(); expect(state.data?.pages.length).toBe(pageCountBefore); - fetcher.unmount(); }); }); @@ -289,8 +274,8 @@ function setup(client: QueryClient, options: { failOnPages?: number[] } = {}) { }; mockHistoryEndpoint(workflowHistoryMultiPageFixture, options.failOnPages); - const fetcher = new WorkflowHistoryFetcher(client, params); + hoistedFetcher = fetcher; 
const waitForData = async () => { let unsubscribe: (() => void) | undefined; @@ -325,7 +310,7 @@ function mockHistoryEndpoint( // Determine current page number based on nextPage param let pageNumber = 1; - if (!nextPage || nextPage === 'null' || nextPage === 'undefined') { + if (!nextPage) { pageNumber = 1; } else if (nextPage === 'page2') { pageNumber = 2; diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index 13ffeda3b..a43dfaa31 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -6,20 +6,18 @@ import { type GetWorkflowHistoryResponse, } from '@/route-handlers/get-workflow-history/get-workflow-history.types'; import request from '@/utils/request'; -import { type RequestError } from '@/utils/request/request-error'; import { type WorkflowHistoryQueryResult, type QueryResultOnChangeCallback, type ShouldContinueCallback, - type WorkflowHistoryQueryKey, + type WorkflowHistoryReactQueryParams, + type WorkflowHistoryInfiniteQueryOptions, + type WorkflowHistoryInfiniteQueryObserver, } from './workflow-history-fetcher.types'; export default class WorkflowHistoryFetcher { - private observer: InfiniteQueryObserver< - GetWorkflowHistoryResponse, - RequestError - >; + private observer: WorkflowHistoryInfiniteQueryObserver; private unsubscribe: (() => void) | null = null; private isStarted = false; @@ -27,12 +25,9 @@ export default class WorkflowHistoryFetcher { constructor( private readonly queryClient: QueryClient, - private readonly params: WorkflowHistoryQueryParams + private readonly params: WorkflowHistoryReactQueryParams ) { - this.observer = new InfiniteQueryObserver< - GetWorkflowHistoryResponse, - RequestError - >(this.queryClient, { + this.observer = new InfiniteQueryObserver(this.queryClient, { ...this.buildObserverOptions(this.params), }); } @@ -40,7 +35,7 @@ export default 
class WorkflowHistoryFetcher { onChange(callback: QueryResultOnChangeCallback): () => void { const current = this.getCurrentState(); if (current) callback(current); - return this.observer.subscribe((res: any) => { + return this.observer.subscribe((res) => { callback(res); }); } @@ -55,8 +50,7 @@ export default class WorkflowHistoryFetcher { let emitCount = 0; const currentState = this.observer.getCurrentResult(); const fetchedFirstPage = currentState.status !== 'pending'; - const shouldEnableQuery = - (!fetchedFirstPage && shouldContinue(currentState)) || fetchedFirstPage; + const shouldEnableQuery = !fetchedFirstPage && shouldContinue(currentState); if (shouldEnableQuery) { this.observer.setOptions({ @@ -68,7 +62,7 @@ export default class WorkflowHistoryFetcher { const emit = (res: WorkflowHistoryQueryResult) => { emitCount++; - // Auto stop when there are no more pages (end of history) or when there is a fresh error happens after the start. + // Auto stop when there are no more pages (end of history) or when there is an existing error from last start (emitCount === 1 means this is the first emit in the current start). // isError is true when the request failes and retries are exhausted. if (res.hasNextPage === false || (res.isError && emitCount > 1)) { this.stop(); @@ -81,15 +75,14 @@ export default class WorkflowHistoryFetcher { } }; - // only start emit (fetching next pages) after the initial fetch is complete - // first page is already fetched on the first subscription below + // Manual emit is needed to fetch the first next page after start is called. + // While this manual emit is not needed for on the first history page as enabling the query will fetch it automatically. 
if (fetchedFirstPage) { emit(currentState); } - if (this.unsubscribe) { - this.unsubscribe(); - } + // remove current listener (if exists) and add new one + this.unsubscribe?.(); this.unsubscribe = this.observer.subscribe((res) => emit(res)); } @@ -107,7 +100,8 @@ export default class WorkflowHistoryFetcher { fetchSingleNextPage(): void { const state = this.getCurrentState(); - + // If the query is still pending, enable it to fetch the first page. + // Otherwise, fetch the next page if it is not already fetching and there are more pages. if (state.status === 'pending') { this.observer.setOptions({ ...this.buildObserverOptions(this.params), @@ -117,21 +111,23 @@ export default class WorkflowHistoryFetcher { state.fetchNextPage(); } - getCurrentState(): WorkflowHistoryQueryResult { + getCurrentState() { return this.observer.getCurrentResult(); } - private buildObserverOptions(params: WorkflowHistoryQueryParams) { + private buildObserverOptions( + queryParams: WorkflowHistoryReactQueryParams + ): WorkflowHistoryInfiniteQueryOptions { return { - queryKey: ['workflow_history', params] satisfies WorkflowHistoryQueryKey, - queryFn: ({ queryKey: [_, qp], pageParam }: any) => + queryKey: ['workflow_history', queryParams], + queryFn: ({ queryKey: [_, params], pageParam }) => request( queryString.stringifyUrl({ - url: `/api/domains/${qp.domain}/${qp.cluster}/workflows/${qp.workflowId}/${qp.runId}/history`, + url: `/api/domains/${params.domain}/${params.cluster}/workflows/${params.workflowId}/${params.runId}/history`, query: { nextPage: pageParam, - pageSize: qp.pageSize, - waitForNewEvent: qp.waitForNewEvent ?? false, + pageSize: params.pageSize, + waitForNewEvent: params.waitForNewEvent ?? 
false, } satisfies WorkflowHistoryQueryParams, }) ).then((res) => res.json()), diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts index 457f8b37b..d622f8db1 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.types.ts @@ -1,20 +1,41 @@ import { + type InfiniteQueryObserver, type InfiniteData, - type InfiniteQueryObserverResult, + type UseInfiniteQueryOptions, } from '@tanstack/react-query'; import { type WorkflowHistoryQueryParams, type GetWorkflowHistoryResponse, + type RouteParams, } from '@/route-handlers/get-workflow-history/get-workflow-history.types'; import { type RequestError } from '@/utils/request/request-error'; -export type WorkflowHistoryQueryKey = [string, WorkflowHistoryQueryParams]; +export type WorkflowHistoryReactQueryParams = RouteParams & + WorkflowHistoryQueryParams; -export type WorkflowHistoryQueryResult = InfiniteQueryObserverResult< - InfiniteData, - RequestError +export type WorkflowHistoryInfiniteQueryObserver = InfiniteQueryObserver< + GetWorkflowHistoryResponse, + RequestError, + InfiniteData, + GetWorkflowHistoryResponse, + WorkflowHistoryQueryKey, + string | undefined >; +export type WorkflowHistoryQueryKey = [string, WorkflowHistoryReactQueryParams]; + +export type WorkflowHistoryInfiniteQueryOptions = UseInfiniteQueryOptions< + GetWorkflowHistoryResponse, + RequestError, + InfiniteData, + GetWorkflowHistoryResponse, + WorkflowHistoryQueryKey, + string | undefined +>; +export type WorkflowHistoryQueryResult = ReturnType< + WorkflowHistoryInfiniteQueryObserver['getCurrentResult'] +>; + export type QueryResultOnChangeCallback = ( state: WorkflowHistoryQueryResult ) => void; From 1e8eae65f613a4a939395a2714a48b805167ccaf Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Wed, 5 Nov 2025 16:24:05 +0100 Subject: [PATCH 14/58] rename unmout to 
destroy Signed-off-by: Assem Hafez --- .../helpers/__tests__/workflow-history-fetcher.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx index aa1f59ddc..f2d59e315 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx @@ -30,7 +30,7 @@ describe(WorkflowHistoryFetcher.name, () => { afterEach(() => { queryClient.clear(); - hoistedFetcher?.unmount(); + hoistedFetcher?.destroy(); }); it('should return the current query state from getCurrentState', async () => { From 33d116350a949727a22cea6f028b66d074bc9542 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 11:41:28 +0100 Subject: [PATCH 15/58] Create hook for fetching history Signed-off-by: Assem Hafez --- .../hooks/__tests__/use-workflow-history-fetcher.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx index 9c5a83976..6ab4ab21d 100644 --- a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx @@ -179,7 +179,7 @@ describe(useWorkflowHistoryFetcher.name, () => { unmount(); - expect(mockFetcherInstance.unmount).toHaveBeenCalledTimes(1); + expect(mockFetcherInstance.destroy).toHaveBeenCalledTimes(1); }); it('should return all expected methods and state', () => { From 1437921621dd34eb3a387298b4b266dbc42dc519 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Wed, 5 Nov 2025 17:34:10 +0100 Subject: [PATCH 16/58] move condition into executeImmediately Signed-off-by: Assem Hafez --- 
.../workflow-history/hooks/use-workflow-history-fetcher.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index 54f8e33f0..1f7a1de39 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -49,7 +49,8 @@ export default function useWorkflowHistoryFetcher( state.data?.pages?.flatMap((page) => page.history?.events || []) || [] ); // immediately set if there is the first page without throttling other wise throttle - setHistoryQuery(() => state, pagesCount <= 1); + const executeImmediately = pagesCount <= 1; + setHistoryQuery(() => state, executeImmediately); }); return () => { From 860dc413a10d0160b70711770660a9bc459d8812 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Wed, 5 Nov 2025 17:49:27 +0100 Subject: [PATCH 17/58] update destroy in method Signed-off-by: Assem Hafez --- .../hooks/__tests__/use-workflow-history-fetcher.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx index 6ab4ab21d..8f67dcbb7 100644 --- a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx @@ -39,7 +39,7 @@ describe(useWorkflowHistoryFetcher.name, () => { mockFetcherInstance = { start: jest.fn(), stop: jest.fn(), - unmount: jest.fn(), + destroy: jest.fn(), fetchSingleNextPage: jest.fn(), onChange: jest.fn((callback) => { mockOnChangeCallback.mockImplementation(callback); From 32335a869edeb8a43a07cf1659d752e2426de26c Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 6 Nov 2025 02:59:09 +0100 Subject: [PATCH 18/58] mock grouper throttle 
Signed-off-by: Assem Hafez --- .../workflow-history/__tests__/workflow-history.test.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/views/workflow-history/__tests__/workflow-history.test.tsx b/src/views/workflow-history/__tests__/workflow-history.test.tsx index 797b1fde6..8109f4aad 100644 --- a/src/views/workflow-history/__tests__/workflow-history.test.tsx +++ b/src/views/workflow-history/__tests__/workflow-history.test.tsx @@ -33,7 +33,9 @@ jest.mock('../hooks/use-workflow-history-fetcher', () => { const actual = jest.requireActual('../hooks/use-workflow-history-fetcher'); return { __esModule: true, - default: jest.fn((params) => actual.default(params, 0)), // 0ms throttle for tests + default: jest.fn((params, onEventsChange) => + actual.default(params, onEventsChange, 0) + ), // 0ms throttle for tests }; }); From 6958fc10db0bbf0f90bd59bb63a6f6c314779d16 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 6 Nov 2025 03:10:14 +0100 Subject: [PATCH 19/58] fix use fetcher test case Signed-off-by: Assem Hafez --- .../hooks/__tests__/use-workflow-history-fetcher.test.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx index 8f67dcbb7..0cb9e7332 100644 --- a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx @@ -19,13 +19,17 @@ let mockOnChangeCallback: jest.Mock; let mockUnsubscribe: jest.Mock; function setup() { - const hookResult = renderHook(() => useWorkflowHistoryFetcher(mockParams)); + const mockOnEventsChange = jest.fn(); + const hookResult = renderHook(() => + useWorkflowHistoryFetcher(mockParams, mockOnEventsChange) + ); return { ...hookResult, mockFetcherInstance, mockOnChangeCallback, mockUnsubscribe, + mockOnEventsChange, }; } 
From d59be3a9046a043f2690a4da0b2ada9e0244baab Mon Sep 17 00:00:00 2001 From: Assem Hafez <137278762+Assem-Uber@users.noreply.github.com> Date: Wed, 5 Nov 2025 10:23:21 +0100 Subject: [PATCH 20/58] feat: Grouped events table header (#1059) * Grouped events header Signed-off-by: Assem Hafez * add test cases Signed-off-by: Assem Hafez * fix comment typo Signed-off-by: Assem Hafez * add space for reset button Signed-off-by: Assem Hafez * move grouped table Signed-off-by: Assem Hafez * Update src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.tsx Co-authored-by: Adhitya Mamallan * Change Id to ID Signed-off-by: Assem Hafez --------- Signed-off-by: Assem Hafez Co-authored-by: Adhitya Mamallan --- .../workflow-history-grouped-table.test.tsx | 32 ++++++++++++++++ ...orkflow-history-grouped-table.constants.ts | 3 ++ .../workflow-history-grouped-table.styles.ts | 19 ++++++++++ .../workflow-history-grouped-table.tsx | 20 ++++++++++ .../workflow-history-grouped-table.types.ts | 37 +++++++++++++++++++ 5 files changed, 111 insertions(+) create mode 100644 src/views/workflow-history-v2/workflow-history-grouped-table/__tests__/workflow-history-grouped-table.test.tsx create mode 100644 src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.constants.ts create mode 100644 src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.styles.ts create mode 100644 src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.tsx create mode 100644 src/views/workflow-history/workflow-history-grouped-table/workflow-history-grouped-table.types.ts diff --git a/src/views/workflow-history-v2/workflow-history-grouped-table/__tests__/workflow-history-grouped-table.test.tsx b/src/views/workflow-history-v2/workflow-history-grouped-table/__tests__/workflow-history-grouped-table.test.tsx new file mode 100644 index 000000000..a4c96d619 --- /dev/null +++ 
b/src/views/workflow-history-v2/workflow-history-grouped-table/__tests__/workflow-history-grouped-table.test.tsx @@ -0,0 +1,32 @@ +import React from 'react'; + +import { render, screen } from '@/test-utils/rtl'; + +import WorkflowHistoryGroupedTable from '../workflow-history-grouped-table'; + +describe(WorkflowHistoryGroupedTable.name, () => { + it('should render all column headers in correct order', () => { + setup(); + + expect(screen.getByText('ID')).toBeInTheDocument(); + expect(screen.getByText('Event group')).toBeInTheDocument(); + expect(screen.getByText('Status')).toBeInTheDocument(); + expect(screen.getByText('Time')).toBeInTheDocument(); + expect(screen.getByText('Duration')).toBeInTheDocument(); + expect(screen.getByText('Details')).toBeInTheDocument(); + }); + + it('should apply grid layout to table header', () => { + setup(); + + const header = screen.getByText('ID').parentElement; + expect(header).toHaveStyle({ + display: 'grid', + gridTemplateColumns: '0.3fr 2fr 1fr 1.2fr 1fr 3fr minmax(0, 70px)', + }); + }); +}); + +function setup() { + return render(); +} diff --git a/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.constants.ts b/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.constants.ts new file mode 100644 index 000000000..e5a35d7fb --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.constants.ts @@ -0,0 +1,3 @@ +// ID, event group, status, time, duration, details, Reset button +export const WORKFLOW_HISTORY_GROUPED_GRID_TEMPLATE_COLUMNS = + '0.3fr 2fr 1fr 1.2fr 1fr 3fr minmax(0, 70px)'; diff --git a/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.styles.ts b/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.styles.ts new file mode 100644 index 000000000..53d06e60a --- /dev/null +++ 
b/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.styles.ts @@ -0,0 +1,19 @@ +import { styled as createStyled, type Theme } from 'baseui'; + +import { WORKFLOW_HISTORY_GROUPED_GRID_TEMPLATE_COLUMNS } from './workflow-history-grouped-table.constants'; + +export const styled = { + TableHeader: createStyled('div', ({ $theme }: { $theme: Theme }) => ({ + // border thickness + accordion panel left padding + paddingLeft: `calc(2px + ${$theme.sizing.scale700})`, + // accordion panel expand icon size + accordion panel right padding + border thickness + paddingRight: `calc(${$theme.sizing.scale800} + ${$theme.sizing.scale700} + 2px)`, + paddingBottom: $theme.sizing.scale200, + ...$theme.typography.LabelXSmall, + color: $theme.colors.contentSecondary, + display: 'grid', + gridTemplateColumns: WORKFLOW_HISTORY_GROUPED_GRID_TEMPLATE_COLUMNS, + gap: $theme.sizing.scale600, + width: '100%', + })), +}; diff --git a/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.tsx b/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.tsx new file mode 100644 index 000000000..f5191bedd --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-grouped-table/workflow-history-grouped-table.tsx @@ -0,0 +1,20 @@ +import { styled } from './workflow-history-grouped-table.styles'; + +/** + * To be used in History v2 + */ +export default function WorkflowHistoryGroupedTable() { + return ( + <> + +
ID
+
Event group
+
Status
+
Time
+
Duration
+
Details
+
+ {/* TODO @adhityamamallan: Add table body with Virtuoso with new design*/} + + ); +} diff --git a/src/views/workflow-history/workflow-history-grouped-table/workflow-history-grouped-table.types.ts b/src/views/workflow-history/workflow-history-grouped-table/workflow-history-grouped-table.types.ts new file mode 100644 index 000000000..5059f9543 --- /dev/null +++ b/src/views/workflow-history/workflow-history-grouped-table/workflow-history-grouped-table.types.ts @@ -0,0 +1,37 @@ +import { type ListRange, type VirtuosoHandle } from 'react-virtuoso'; + +import { type RequestError } from '@/utils/request/request-error'; +import { type WorkflowPageTabsParams } from '@/views/workflow-page/workflow-page-tabs/workflow-page-tabs.types'; + +import { + type GetIsEventExpanded, + type ToggleIsEventExpanded, +} from '../../workflow-history/hooks/use-event-expansion-toggle.types'; +import { type HistoryEventsGroup } from '../../workflow-history/workflow-history.types'; + +export type Props = { + // Data and state props + eventGroupsEntries: Array<[string, HistoryEventsGroup]>; + selectedEventId?: string; + decodedPageUrlParams: WorkflowPageTabsParams; + onResetToEventId: (eventId: string) => void; + + // React Query props + error: RequestError | null; + hasMoreEvents: boolean; + isFetchingMoreEvents: boolean; + fetchMoreEvents: () => void; + + // Event expansion state management + getIsEventExpanded: GetIsEventExpanded; + toggleIsEventExpanded: ToggleIsEventExpanded; + + // Virtualization props + onVisibleRangeChange: (r: ListRange) => void; + virtuosoRef: React.RefObject; + + // Workflow info + workflowCloseTimeMs?: number | null; + workflowIsArchived: boolean; + reachedAvailableHistoryEnd: boolean; +}; From 9122ff50c063cf4a883eef518d67a2739357c66e Mon Sep 17 00:00:00 2001 From: Tim Chan Date: Wed, 5 Nov 2025 15:35:30 -0800 Subject: [PATCH 21/58] chore: Fixed docker URI for kafka image (#1066) Signed-off-by: Tim Chan --- docker-compose-backend-services.yml | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/docker-compose-backend-services.yml b/docker-compose-backend-services.yml index 939d07a26..444870dfe 100644 --- a/docker-compose-backend-services.yml +++ b/docker-compose-backend-services.yml @@ -23,7 +23,7 @@ services: ports: - '9090:9090' kafka: - image: docker.io/bitnami/kafka:3.7 + image: docker.io/bitnamilegacy/kafka:3.7 hostname: kafka container_name: kafka ports: From a1927622b99e64bbc833ad3b1df453ee51775dd0 Mon Sep 17 00:00:00 2001 From: Tim Chan Date: Wed, 5 Nov 2025 18:49:43 -0800 Subject: [PATCH 22/58] chore: New feature flag for Cron List View (#1068) Signed-off-by: Tim Chan --- src/config/dynamic/dynamic.config.ts | 12 ++++++++++++ src/config/dynamic/resolvers/cron-list-enabled.ts | 4 ++++ .../dynamic/resolvers/schemas/resolver-schemas.ts | 4 ++++ .../config/__fixtures__/resolved-config-values.ts | 1 + 4 files changed, 21 insertions(+) create mode 100644 src/config/dynamic/resolvers/cron-list-enabled.ts diff --git a/src/config/dynamic/dynamic.config.ts b/src/config/dynamic/dynamic.config.ts index c0ced1c38..978e19fd8 100644 --- a/src/config/dynamic/dynamic.config.ts +++ b/src/config/dynamic/dynamic.config.ts @@ -11,6 +11,7 @@ import clusters from './resolvers/clusters'; import clustersPublic from './resolvers/clusters-public'; import { type PublicClustersConfigs } from './resolvers/clusters-public.types'; import { type ClustersConfigs } from './resolvers/clusters.types'; +import cronListEnabled from './resolvers/cron-list-enabled'; import extendedDomainInfoEnabled from './resolvers/extended-domain-info-enabled'; import { type ExtendedDomainInfoEnabledConfig } from './resolvers/extended-domain-info-enabled.types'; import workflowActionsEnabled from './resolvers/workflow-actions-enabled'; @@ -34,6 +35,12 @@ const dynamicConfigs: { 'serverStart', true >; + CRON_LIST_ENABLED: ConfigAsyncResolverDefinition< + undefined, + boolean, + 'request', + true + >; WORKFLOW_ACTIONS_ENABLED: ConfigAsyncResolverDefinition< 
WorkflowActionsEnabledResolverParams, WorkflowActionsEnabledConfig, @@ -77,6 +84,11 @@ const dynamicConfigs: { evaluateOn: 'serverStart', isPublic: true, }, + CRON_LIST_ENABLED: { + resolver: cronListEnabled, + evaluateOn: 'request', + isPublic: true, + }, WORKFLOW_ACTIONS_ENABLED: { resolver: workflowActionsEnabled, evaluateOn: 'request', diff --git a/src/config/dynamic/resolvers/cron-list-enabled.ts b/src/config/dynamic/resolvers/cron-list-enabled.ts new file mode 100644 index 000000000..11b70da73 --- /dev/null +++ b/src/config/dynamic/resolvers/cron-list-enabled.ts @@ -0,0 +1,4 @@ +export default async function cronListEnabled(): Promise { + // Check for environment variable override, default to false (disabled) + return process.env.CRON_LIST_ENABLED?.toLowerCase() === 'true'; +} diff --git a/src/config/dynamic/resolvers/schemas/resolver-schemas.ts b/src/config/dynamic/resolvers/schemas/resolver-schemas.ts index 0d94706ce..41c31d7d6 100644 --- a/src/config/dynamic/resolvers/schemas/resolver-schemas.ts +++ b/src/config/dynamic/resolvers/schemas/resolver-schemas.ts @@ -44,6 +44,10 @@ const resolverSchemas: ResolverSchemas = { start: workflowActionsEnabledValueSchema, }), }, + CRON_LIST_ENABLED: { + args: z.undefined(), + returnType: z.boolean(), + }, EXTENDED_DOMAIN_INFO_ENABLED: { args: z.undefined(), returnType: z.object({ diff --git a/src/utils/config/__fixtures__/resolved-config-values.ts b/src/utils/config/__fixtures__/resolved-config-values.ts index 6f7589b10..529754233 100644 --- a/src/utils/config/__fixtures__/resolved-config-values.ts +++ b/src/utils/config/__fixtures__/resolved-config-values.ts @@ -27,6 +27,7 @@ const mockResolvedConfigValues: LoadedConfigResolvedValues = { clusterName: 'mock-cluster2', }, ], + CRON_LIST_ENABLED: false, WORKFLOW_ACTIONS_ENABLED: { terminate: 'ENABLED', cancel: 'ENABLED', From d7b05b9989636f299109f8977d73d9d9a7cb4ac6 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 3 Nov 2025 13:16:44 +0100 Subject: [PATCH 23/58] use 
fetcher in workflow history --- src/views/workflow-history/workflow-history.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 9f87c6304..1bb8d960f 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -80,7 +80,6 @@ export default function WorkflowHistory({ params }: Props) { waitForNewEvent: wfHistoryRequestArgs.waitForNewEvent, }, (events) => { - console.log('events', events); updateGrouperEvents(events); }, 2000 From 2b9eb1126c24224965a22ae82a17b5f6f6993f6b Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Fri, 7 Nov 2025 17:44:02 +0100 Subject: [PATCH 24/58] fix content clicks Signed-off-by: Assem Hafez --- .../workflow-history-header.styles.ts | 1 + src/views/workflow-history/workflow-history.styles.ts | 11 +++++++++-- src/views/workflow-history/workflow-history.tsx | 4 ++-- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/views/workflow-history/workflow-history-header/workflow-history-header.styles.ts b/src/views/workflow-history/workflow-history-header/workflow-history-header.styles.ts index 0e46ed7fe..b644ce1fa 100644 --- a/src/views/workflow-history/workflow-history-header/workflow-history-header.styles.ts +++ b/src/views/workflow-history/workflow-history-header/workflow-history-header.styles.ts @@ -26,6 +26,7 @@ export const styled = { position: 'sticky', top: 0, boxShadow: $isSticky ? 
$theme.lighting.shallowBelow : 'none', + zIndex: 1, }, }), }) diff --git a/src/views/workflow-history/workflow-history.styles.ts b/src/views/workflow-history/workflow-history.styles.ts index 174289182..c0ecfd289 100644 --- a/src/views/workflow-history/workflow-history.styles.ts +++ b/src/views/workflow-history/workflow-history.styles.ts @@ -4,12 +4,19 @@ import type { } from '@/hooks/use-styletron-classes'; const cssStylesObj = { + container: { + display: 'flex', + flexDirection: 'column', + flex: 1, + // This is to ensure the header section z-index is relative to this container and do not + // show above external elements like popvers and modals + position: 'relative', + zIndex: 0, + }, contentSection: { display: 'flex', flexDirection: 'column', flex: 1, - // This is to ensure the content section is behind the header - zIndex: -1, }, eventsContainer: (theme) => ({ display: 'flex', diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 1bb8d960f..8f927a58e 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -327,7 +327,7 @@ export default function WorkflowHistory({ params }: Props) { } return ( - <> +
)} - +
); } From 7803b257409c071050df91d68b4b835748b0547d Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Fri, 7 Nov 2025 19:08:59 +0100 Subject: [PATCH 25/58] fix use initial selected event Signed-off-by: Assem Hafez --- .../use-initial-selected-event.test.ts | 166 ++++++++++++++++-- .../hooks/use-initial-selected-event.ts | 22 +-- .../hooks/use-initial-selected-event.types.ts | 6 +- .../workflow-history/workflow-history.tsx | 2 +- 4 files changed, 165 insertions(+), 31 deletions(-) diff --git a/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts b/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts index 79f9771b7..1d89df8d7 100644 --- a/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts +++ b/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts @@ -1,21 +1,36 @@ import { renderHook } from '@/test-utils/rtl'; -import { completedDecisionTaskEvents } from '../../__fixtures__/workflow-history-decision-events'; +import { + mockActivityEventGroup, + mockDecisionEventGroup, + mockTimerEventGroup, + mockSingleEventGroup, +} from '../../__fixtures__/workflow-history-event-groups'; +import { type HistoryEventsGroup } from '../../workflow-history.types'; import useInitialSelectedEvent from '../use-initial-selected-event'; -jest.mock('../../helpers/get-history-event-group-id'); - describe('useInitialSelectedEvent', () => { - const events = [...completedDecisionTaskEvents]; - const filteredEventGroupsEntries: [string, any][] = [ - ['group1', completedDecisionTaskEvents], - ]; + // Create a more realistic set of event groups with multiple types + const mockEventGroups: Record = { + '1': mockSingleEventGroup, + '2': mockDecisionEventGroup, + '5': mockActivityEventGroup, + '10': mockTimerEventGroup, + '11': mockDecisionEventGroup, + '12': mockActivityEventGroup, + }; + + it('should return shouldSearchForInitialEvent as true when selectedEventId is defined', () => { + // Filtered 
entries contain only a subset of all event groups + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ]; - it('should return shouldSearchForInitialEvent as true when initialEventId is defined', () => { const { result } = renderHook(() => useInitialSelectedEvent({ selectedEventId: '2', - events, + eventGroups: mockEventGroups, filteredEventGroupsEntries, }) ); @@ -23,11 +38,17 @@ describe('useInitialSelectedEvent', () => { expect(result.current.shouldSearchForInitialEvent).toBe(true); }); - it('should return shouldSearchForInitialEvent as false when initialEventId is undefined', () => { + it('should return shouldSearchForInitialEvent as false when selectedEventId is undefined', () => { + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['1', mockEventGroups['1']], + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ]; + const { result } = renderHook(() => useInitialSelectedEvent({ selectedEventId: undefined, - events, + eventGroups: mockEventGroups, filteredEventGroupsEntries, }) ); @@ -35,27 +56,138 @@ describe('useInitialSelectedEvent', () => { expect(result.current.shouldSearchForInitialEvent).toBe(false); }); - it('should return initialEventGroupIndex as undefined when initialEventId is defined & group is not found', () => { + it('should return initialEventGroupIndex when event is found in a group and group key matches event ID', () => { + // Filtered entries contain only a subset - event '2' is at index 1 + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['1', mockEventGroups['1']], + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ]; + const { result } = renderHook(() => useInitialSelectedEvent({ - selectedEventId: '500', - events, - filteredEventGroupsEntries: [], + selectedEventId: '2', + eventGroups: mockEventGroups, + filteredEventGroupsEntries, + }) + ); + + 
expect(result.current.initialEventGroupIndex).toBe(1); + expect(result.current.initialEventFound).toBe(true); + }); + + it('should return initialEventGroupIndex as undefined when selectedEventId is defined & group is not found in filtered entries', () => { + // Group '2' exists in mockEventGroups but is filtered out from the visible list + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['1', mockEventGroups['1']], + ['10', mockEventGroups['10']], + ]; + + const { result } = renderHook(() => + useInitialSelectedEvent({ + selectedEventId: '2', + eventGroups: mockEventGroups, + filteredEventGroupsEntries, }) ); expect(result.current.initialEventGroupIndex).toBe(undefined); }); - it('should return initialEventFound as false when initialEventId is defined & event is not found', () => { + it('should find event when group key does not match event ID but group contains the event', () => { + // Group key is '5' but contains event with ID '7' (activity events) + // The hook should find the event in the group regardless of the group key not matching + // Event '7' is in group '5' which is at index 1 in filtered entries + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ]; + + const { result } = renderHook(() => + useInitialSelectedEvent({ + selectedEventId: '7', + eventGroups: mockEventGroups, + filteredEventGroupsEntries, + }) + ); + + expect(result.current.initialEventFound).toBe(true); + expect(result.current.initialEventGroupIndex).toBe(1); + }); + + it('should return initialEventFound as false when selectedEventId is defined & event is not found in groups', () => { + // Event ID '500' doesn't exist in any group + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['1', mockEventGroups['1']], + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ['10', mockEventGroups['10']], + ]; + const { result } = renderHook(() => 
useInitialSelectedEvent({ selectedEventId: '500', - events, + eventGroups: mockEventGroups, filteredEventGroupsEntries, }) ); expect(result.current.initialEventFound).toBe(false); }); + + it('should return initialEventFound as false when eventGroups is empty', () => { + // Edge case: no event groups available at all + const { result } = renderHook(() => + useInitialSelectedEvent({ + selectedEventId: '2', + eventGroups: {}, + filteredEventGroupsEntries: [], + }) + ); + + expect(result.current.initialEventFound).toBe(false); + expect(result.current.initialEventGroupIndex).toBe(undefined); + }); + + it('should find event at correct index when multiple groups are filtered', () => { + // Realistic scenario: many groups but only some are visible after filtering + // Event '7' is in group '5' which should be at index 2 in the filtered list + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['1', mockEventGroups['1']], + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ]; + + const { result } = renderHook(() => + useInitialSelectedEvent({ + selectedEventId: '7', + eventGroups: mockEventGroups, + filteredEventGroupsEntries, + }) + ); + + expect(result.current.initialEventFound).toBe(true); + expect(result.current.initialEventGroupIndex).toBe(2); + }); + + it('should handle event at the end of filtered list', () => { + // Event '16' is in group '10' which is at the last position in the filtered list + const filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ + ['1', mockEventGroups['1']], + ['2', mockEventGroups['2']], + ['5', mockEventGroups['5']], + ['10', mockEventGroups['10']], + ]; + + const { result } = renderHook(() => + useInitialSelectedEvent({ + selectedEventId: '16', + eventGroups: mockEventGroups, + filteredEventGroupsEntries, + }) + ); + + expect(result.current.initialEventFound).toBe(true); + expect(result.current.initialEventGroupIndex).toBe(3); + }); }); diff --git 
a/src/views/workflow-history/hooks/use-initial-selected-event.ts b/src/views/workflow-history/hooks/use-initial-selected-event.ts index bbe31f9e4..c85a781c6 100644 --- a/src/views/workflow-history/hooks/use-initial-selected-event.ts +++ b/src/views/workflow-history/hooks/use-initial-selected-event.ts @@ -1,7 +1,5 @@ import { useMemo, useState } from 'react'; -import getHistoryEventGroupId from '../helpers/get-history-event-group-id'; - import { type UseInitialSelectedEventParams } from './use-initial-selected-event.types'; /* @@ -12,27 +10,31 @@ import { type UseInitialSelectedEventParams } from './use-initial-selected-event */ export default function useInitialSelectedEvent({ selectedEventId, - events, + eventGroups, filteredEventGroupsEntries, }: UseInitialSelectedEventParams) { + // preserve initial event id even if prop changed. const [initialEventId] = useState(selectedEventId); - const initialEvent = useMemo(() => { + const initialEventGroupEntry = useMemo(() => { if (!initialEventId) return undefined; - return events.find((e) => e.eventId === initialEventId); - }, [events, initialEventId]); + + return Object.entries(eventGroups).find(([_, group]) => + group.events.find((e) => e.eventId === initialEventId) + ); + }, [eventGroups, initialEventId]); const shouldSearchForInitialEvent = initialEventId !== undefined; - const initialEventFound = initialEvent !== undefined; + const initialEventFound = initialEventGroupEntry !== undefined; const initialEventGroupIndex = useMemo(() => { - if (!initialEvent) return undefined; - const groupId = getHistoryEventGroupId(initialEvent); + if (!initialEventGroupEntry) return undefined; + const groupId = initialEventGroupEntry[0]; const index = filteredEventGroupsEntries.findIndex( ([id]) => id === groupId ); return index > -1 ? 
index : undefined; - }, [initialEvent, filteredEventGroupsEntries]); + }, [initialEventGroupEntry, filteredEventGroupsEntries]); return { shouldSearchForInitialEvent, diff --git a/src/views/workflow-history/hooks/use-initial-selected-event.types.ts b/src/views/workflow-history/hooks/use-initial-selected-event.types.ts index c8366a006..7e34369a2 100644 --- a/src/views/workflow-history/hooks/use-initial-selected-event.types.ts +++ b/src/views/workflow-history/hooks/use-initial-selected-event.types.ts @@ -1,7 +1,7 @@ -import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; +import { type HistoryEventsGroup } from '../workflow-history.types'; export type UseInitialSelectedEventParams = { - events: HistoryEvent[]; + eventGroups: Record; selectedEventId?: string; - filteredEventGroupsEntries: [string, any][]; + filteredEventGroupsEntries: [string, HistoryEventsGroup][]; }; diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 8f927a58e..81100ab14 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -237,7 +237,7 @@ export default function WorkflowHistory({ params }: Props) { shouldSearchForInitialEvent, } = useInitialSelectedEvent({ selectedEventId: queryParams.historySelectedEventId, - events, + eventGroups, filteredEventGroupsEntries, }); From 01e7eb688e0f3e0262e642d479b7073edb307b9a Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Fri, 7 Nov 2025 19:56:23 +0100 Subject: [PATCH 26/58] increase page size Signed-off-by: Assem Hafez --- .../config/workflow-history-page-size.config.ts | 2 +- .../hooks/use-initial-selected-event.ts | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/views/workflow-history/config/workflow-history-page-size.config.ts b/src/views/workflow-history/config/workflow-history-page-size.config.ts index 933cf92dd..901f6c373 100644 --- 
a/src/views/workflow-history/config/workflow-history-page-size.config.ts +++ b/src/views/workflow-history/config/workflow-history-page-size.config.ts @@ -1,3 +1,3 @@ -const WORKFLOW_HISTORY_PAGE_SIZE_CONFIG = 200; +const WORKFLOW_HISTORY_PAGE_SIZE_CONFIG = 1000; export default WORKFLOW_HISTORY_PAGE_SIZE_CONFIG; diff --git a/src/views/workflow-history/hooks/use-initial-selected-event.ts b/src/views/workflow-history/hooks/use-initial-selected-event.ts index c85a781c6..f3f055f5c 100644 --- a/src/views/workflow-history/hooks/use-initial-selected-event.ts +++ b/src/views/workflow-history/hooks/use-initial-selected-event.ts @@ -1,4 +1,4 @@ -import { useMemo, useState } from 'react'; +import { useMemo, useRef, useState } from 'react'; import { type UseInitialSelectedEventParams } from './use-initial-selected-event.types'; @@ -15,6 +15,7 @@ export default function useInitialSelectedEvent({ }: UseInitialSelectedEventParams) { // preserve initial event id even if prop changed. const [initialEventId] = useState(selectedEventId); + const foundGroupIndexRef = useRef(undefined); const initialEventGroupEntry = useMemo(() => { if (!initialEventId) return undefined; @@ -29,11 +30,21 @@ export default function useInitialSelectedEvent({ const initialEventGroupIndex = useMemo(() => { if (!initialEventGroupEntry) return undefined; + const groupId = initialEventGroupEntry[0]; + // If group index not change do not search again. + if ( + foundGroupIndexRef.current && + filteredEventGroupsEntries[foundGroupIndexRef.current][0] === groupId + ) + return foundGroupIndexRef.current; + const index = filteredEventGroupsEntries.findIndex( ([id]) => id === groupId ); - return index > -1 ? index : undefined; + const foundGroupIndex = index > -1 ? 
index : undefined; + foundGroupIndexRef.current = foundGroupIndex; + return foundGroupIndex; }, [initialEventGroupEntry, filteredEventGroupsEntries]); return { From 7841c38c9d30bd33901f2fd522a7424e23caac63 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Sat, 8 Nov 2025 23:48:30 +0100 Subject: [PATCH 27/58] optimize first page processing Signed-off-by: Assem Hafez --- .../helpers/workflow-history-fetcher.ts | 3 ++- .../helpers/workflow-history-grouper.ts | 11 +++++++---- .../hooks/use-workflow-history-fetcher.ts | 4 ++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index a43dfaa31..a10962ce1 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -126,7 +126,8 @@ export default class WorkflowHistoryFetcher { url: `/api/domains/${params.domain}/${params.cluster}/workflows/${params.workflowId}/${params.runId}/history`, query: { nextPage: pageParam, - pageSize: params.pageSize, + // TODO use constants/queryParams for page size + pageSize: pageParam ? 1000 : 200, waitForNewEvent: params.waitForNewEvent ?? false, } satisfies WorkflowHistoryQueryParams, }) diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.ts b/src/views/workflow-history/helpers/workflow-history-grouper.ts index 7425bc6fd..927994041 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.ts @@ -187,20 +187,23 @@ export default class WorkflowHistoryGrouper { * Uses Scheduler API if available, otherwise falls back to Promise microtask. 
*/ private scheduleNextBatch() { - if ( + // if first batch process immediately, this helps avoiding UI delays + if (this.lastProcessedEventIndex === -1) { + this.processBatch(); + } else if ( typeof window !== 'undefined' && 'scheduler' in window && 'postTask' in (window.scheduler as any) ) { - // Use Scheduler API with background priority for non-urgent work + // Use Scheduler API with background priority if available (window.scheduler as any) .postTask(() => this.processBatch(), { priority: 'background' }) .catch(() => { - // Fallback if postTask fails + // Fallback to setTimeout if postTask fails setTimeout(() => this.processBatch(), 0); }); } else { - // Fallback to Promise microtask + // Fallback to setTimeout setTimeout(() => this.processBatch(), 0); } } diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index 1f7a1de39..4b40786d1 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -28,6 +28,8 @@ export default function useWorkflowHistoryFetcher( if (!fetcherRef.current) { fetcherRef.current = new WorkflowHistoryFetcher(queryClient, params); + // Fetch first page + fetcherRef.current.start((state) => !state?.data?.pages?.length); } const [historyQuery, setHistoryQuery] = useThrottledState< @@ -61,8 +63,6 @@ export default function useWorkflowHistoryFetcher( useEffect(() => { if (!fetcherRef.current) return; - // Fetch first page - fetcherRef.current.start((state) => !state?.data?.pages?.length); return () => { fetcherRef.current?.destroy(); }; From b91b8b7db83769a3b6295bdc9c863f87cd7603e5 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Sun, 9 Nov 2025 01:31:02 +0100 Subject: [PATCH 28/58] fix test cases Signed-off-by: Assem Hafez --- .../__tests__/workflow-history.test.tsx | 10 +++++++++- .../workflow-history-grouper.test.tsx | 18 +++++++++++++++--- 
.../helpers/workflow-history-fetcher.ts | 12 +++++++----- .../hooks/use-workflow-history-fetcher.ts | 1 + 4 files changed, 32 insertions(+), 9 deletions(-) diff --git a/src/views/workflow-history/__tests__/workflow-history.test.tsx b/src/views/workflow-history/__tests__/workflow-history.test.tsx index 8109f4aad..df6fb5446 100644 --- a/src/views/workflow-history/__tests__/workflow-history.test.tsx +++ b/src/views/workflow-history/__tests__/workflow-history.test.tsx @@ -28,7 +28,7 @@ jest.mock('@/hooks/use-page-query-params/use-page-query-params', () => jest.fn(() => [{ historySelectedEventId: '1' }, jest.fn()]) ); -// Mock the hook to use minimal throttle delay for faster tests +// Mock the hooks to use minimal throttle delay for faster tests jest.mock('../hooks/use-workflow-history-fetcher', () => { const actual = jest.requireActual('../hooks/use-workflow-history-fetcher'); return { @@ -39,6 +39,14 @@ jest.mock('../hooks/use-workflow-history-fetcher', () => { }; }); +jest.mock('../hooks/use-workflow-history-grouper', () => { + const actual = jest.requireActual('../hooks/use-workflow-history-grouper'); + return { + __esModule: true, + default: jest.fn(() => actual.default(0)), // 0ms throttle for tests + }; +}); + jest.mock( '../workflow-history-compact-event-card/workflow-history-compact-event-card', () => jest.fn(() =>
Compact group Card
) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx index 4a226b65f..31851b176 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx @@ -625,11 +625,18 @@ describe(WorkflowHistoryGrouper.name, () => { status: 'idle', }); - // Add events but don't wait - status should be processing + // Add events - with batchSize=1, first batch will be processed synchronously but subsequent batches will be async grouper.updateEvents(completedActivityTaskEvents); + + // Check state after first batch (might be synchronous) state = grouper.getState(); - expect(state.status).toBe('processing'); - expect(state.remainingEventsCount).toBeGreaterThan(0); + // First batch is processed immediately, so processedEventsCount should be at least 1 + expect(state.processedEventsCount).toBeGreaterThan(0); + + // If there are remaining events, status could be 'processing' + if (state.remainingEventsCount > 0) { + expect(state.status).toBe('processing'); + } // Wait for processing to complete await waitForProcessing(); @@ -677,6 +684,11 @@ function setup(options: Partial = {}) { // Helper function to wait for next processing cycle const waitForProcessing = async (timeout = 1000): Promise => { + // Check if already idle (processing completed synchronously) + if (grouper.getState().status === 'idle') { + return Promise.resolve(); + } + await new Promise((resolve, reject) => { const timeoutId = setTimeout(() => { // Remove this resolver from queue if it times out diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index a10962ce1..e5c288d1d 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ 
b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -44,13 +44,17 @@ export default class WorkflowHistoryFetcher { if (shouldContinue) { this.shouldContinue = shouldContinue; } - // If already started, return - if (this.isStarted) return; + + // remove current listener (if exists) to have fresh emits only + this.unsubscribe?.(); + this.unsubscribe = null; + this.isStarted = true; let emitCount = 0; const currentState = this.observer.getCurrentResult(); const fetchedFirstPage = currentState.status !== 'pending'; - const shouldEnableQuery = !fetchedFirstPage && shouldContinue(currentState); + const shouldEnableQuery = + !fetchedFirstPage && this.shouldContinue(currentState); if (shouldEnableQuery) { this.observer.setOptions({ @@ -81,8 +85,6 @@ export default class WorkflowHistoryFetcher { emit(currentState); } - // remove current listener (if exists) and add new one - this.unsubscribe?.(); this.unsubscribe = this.observer.subscribe((res) => emit(res)); } diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index 4b40786d1..3609e3bbf 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -28,6 +28,7 @@ export default function useWorkflowHistoryFetcher( if (!fetcherRef.current) { fetcherRef.current = new WorkflowHistoryFetcher(queryClient, params); + // Fetch first page fetcherRef.current.start((state) => !state?.data?.pages?.length); } From 4558d59b355da6855c6a3f29299fb0c1984090b7 Mon Sep 17 00:00:00 2001 From: Assem Hafez <137278762+Assem-Uber@users.noreply.github.com> Date: Fri, 7 Nov 2025 19:10:58 +0100 Subject: [PATCH 29/58] fix: History content negative z-index disallow clicks (#1069) * fix content clicks Signed-off-by: Assem Hafez * Update src/views/workflow-history/workflow-history.styles.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> 
--------- Signed-off-by: Assem Hafez Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- src/views/workflow-history/workflow-history.styles.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/workflow-history.styles.ts b/src/views/workflow-history/workflow-history.styles.ts index c0ecfd289..bf2acd8cb 100644 --- a/src/views/workflow-history/workflow-history.styles.ts +++ b/src/views/workflow-history/workflow-history.styles.ts @@ -9,7 +9,7 @@ const cssStylesObj = { flexDirection: 'column', flex: 1, // This is to ensure the header section z-index is relative to this container and do not - // show above external elements like popvers and modals + // show above external elements like popovers and modals position: 'relative', zIndex: 0, }, From 877be83d3140ecd519f3dc0040448b9e5058a9ff Mon Sep 17 00:00:00 2001 From: Assem Hafez <137278762+Assem-Uber@users.noreply.github.com> Date: Mon, 10 Nov 2025 14:24:21 +0100 Subject: [PATCH 30/58] feat: Use fetcher in workflow history (#1064) * Create fetcher utility Signed-off-by: Assem Hafez * rename query Signed-off-by: Assem Hafez * Create hook for fetching history Signed-off-by: Assem Hafez * add configurable throttleMs to the hook Signed-off-by: Assem Hafez * use fetcher in workflow history * remove useKeepLoadingEvents Signed-off-by: Assem Hafez * update fetcher based on feedback Signed-off-by: Assem Hafez * rename unmout to destroy Signed-off-by: Assem Hafez * Create hook for fetching history Signed-off-by: Assem Hafez * move condition into executeImmediately Signed-off-by: Assem Hafez * update destroy in method Signed-off-by: Assem Hafez * fix type error with query params Signed-off-by: Assem Hafez --------- Signed-off-by: Assem Hafez --- .../__tests__/workflow-history-header.test.tsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/views/workflow-history/workflow-history-header/__tests__/workflow-history-header.test.tsx 
b/src/views/workflow-history/workflow-history-header/__tests__/workflow-history-header.test.tsx index f3b77a3ac..e7a151a52 100644 --- a/src/views/workflow-history/workflow-history-header/__tests__/workflow-history-header.test.tsx +++ b/src/views/workflow-history/workflow-history-header/__tests__/workflow-history-header.test.tsx @@ -227,8 +227,6 @@ function setup(props: Partial = {}) { cluster: 'test-cluster', workflowId: 'test-workflowId', runId: 'test-runId', - pageSize: 100, - waitForNewEvent: 'true', }, pageFiltersProps: { activeFiltersCount: 0, From 75f3a340dec3abfdda55933f2cf38e9a78d3bb99 Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Wed, 12 Nov 2025 12:43:02 +0100 Subject: [PATCH 31/58] fix: Autocomplete for bool values (#1072) Fix workflows query autocomplete to suggest the correct tokens for boolean values Signed-off-by: Adhitya Mamallan --- .../__tests__/get-autocomplete-suggestions.test.ts | 3 ++- .../get-updated-query-text-with-suggestions.test.ts | 12 ++++++------ .../workflows-query-input.constants.ts | 2 +- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-autocomplete-suggestions.test.ts b/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-autocomplete-suggestions.test.ts index c21c884b0..2b44b4fd0 100644 --- a/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-autocomplete-suggestions.test.ts +++ b/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-autocomplete-suggestions.test.ts @@ -32,7 +32,8 @@ describe('getAutocompleteSuggestions', () => { }); it('suggests logical operators after a complete boolean value', () => { - const suggestionsAfterBoolean = getAutocompleteSuggestions('IsCron = TRUE'); + const suggestionsAfterBoolean = + getAutocompleteSuggestions('IsCron = "true"'); expect(suggestionsAfterBoolean).toEqual(LOGICAL_OPERATORS); }); diff --git 
a/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-updated-query-text-with-suggestions.test.ts b/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-updated-query-text-with-suggestions.test.ts index 462fb63ee..1621621e5 100644 --- a/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-updated-query-text-with-suggestions.test.ts +++ b/src/views/shared/workflows-header/workflows-query-input/helpers/__tests__/get-updated-query-text-with-suggestions.test.ts @@ -25,14 +25,14 @@ describe('getUpdatedQueryTextWithSuggestion', () => { expect(result).toBe('WorkflowID = "foo" AND '); }); - it('appends suggestion after boolean value TRUE', () => { - const result = getUpdatedQueryTextWithSuggestion('IsCron = TRUE', 'AND'); - expect(result).toBe('IsCron = TRUE AND '); + it('appends suggestion after boolean value "true"', () => { + const result = getUpdatedQueryTextWithSuggestion('IsCron = "true" ', 'AND'); + expect(result).toBe('IsCron = "true" AND '); }); - it('appends suggestion after boolean value FALSE', () => { - const result = getUpdatedQueryTextWithSuggestion('IsCron = FALSE', 'AND'); - expect(result).toBe('IsCron = FALSE AND '); + it('appends suggestion after boolean value "false"', () => { + const result = getUpdatedQueryTextWithSuggestion('IsCron = "false"', 'AND'); + expect(result).toBe('IsCron = "false" AND '); }); it('appends suggestion after BETWEEN operator', () => { diff --git a/src/views/shared/workflows-header/workflows-query-input/workflows-query-input.constants.ts b/src/views/shared/workflows-header/workflows-query-input/workflows-query-input.constants.ts index 8f66a71bb..1267aa627 100644 --- a/src/views/shared/workflows-header/workflows-query-input/workflows-query-input.constants.ts +++ b/src/views/shared/workflows-header/workflows-query-input/workflows-query-input.constants.ts @@ -47,7 +47,7 @@ export const STATUSES = [ '"timed_out"', ]; -export const BOOLEAN_VALUES = ['TRUE', 
'FALSE']; +export const BOOLEAN_VALUES = ['"true"', '"false"']; export const TIME_FORMAT = '"YYYY-MM-DDTHH:MM:SS±HH:MM"'; From e9e389888ec44eef5195b6ffb2296e97e5cb36ed Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Wed, 12 Nov 2025 15:07:24 +0100 Subject: [PATCH 32/58] Add feature flag for Failover History (#1070) Signed-off-by: Adhitya Mamallan --- src/config/dynamic/dynamic.config.ts | 12 ++++++++++++ .../dynamic/resolvers/failover-history-enabled.ts | 15 +++++++++++++++ .../dynamic/resolvers/schemas/resolver-schemas.ts | 4 ++++ .../config/__fixtures__/resolved-config-values.ts | 1 + 4 files changed, 32 insertions(+) create mode 100644 src/config/dynamic/resolvers/failover-history-enabled.ts diff --git a/src/config/dynamic/dynamic.config.ts b/src/config/dynamic/dynamic.config.ts index 978e19fd8..843939728 100644 --- a/src/config/dynamic/dynamic.config.ts +++ b/src/config/dynamic/dynamic.config.ts @@ -14,6 +14,7 @@ import { type ClustersConfigs } from './resolvers/clusters.types'; import cronListEnabled from './resolvers/cron-list-enabled'; import extendedDomainInfoEnabled from './resolvers/extended-domain-info-enabled'; import { type ExtendedDomainInfoEnabledConfig } from './resolvers/extended-domain-info-enabled.types'; +import failoverHistoryEnabled from './resolvers/failover-history-enabled'; import workflowActionsEnabled from './resolvers/workflow-actions-enabled'; import { type WorkflowActionsEnabledResolverParams, @@ -65,6 +66,12 @@ const dynamicConfigs: { 'request', true >; + FAILOVER_HISTORY_ENABLED: ConfigAsyncResolverDefinition< + undefined, + boolean, + 'request', + true + >; } = { CADENCE_WEB_PORT: { env: 'CADENCE_WEB_PORT', @@ -109,6 +116,11 @@ const dynamicConfigs: { evaluateOn: 'request', isPublic: true, }, + FAILOVER_HISTORY_ENABLED: { + resolver: failoverHistoryEnabled, + evaluateOn: 'request', + isPublic: true, + }, } as const; export default dynamicConfigs; diff --git a/src/config/dynamic/resolvers/failover-history-enabled.ts 
b/src/config/dynamic/resolvers/failover-history-enabled.ts new file mode 100644 index 000000000..95deb51e9 --- /dev/null +++ b/src/config/dynamic/resolvers/failover-history-enabled.ts @@ -0,0 +1,15 @@ +/** + * WIP: Returns whether failover history APIs and UI are enabled. + * + * To enable the failover history tab, set the CADENCE_FAILOVER_HISTORY_ENABLED env variable to true. + * For further customization, override the implementation of this resolver. + * + * Server version behaviour: + * - > 1.3.6 (still hasn't been released yet): The Failover History API will work as expected. + * - <= 1.3.6: The Failover History API will return a GRPC unimplemented error (maps to HTTP 404 in the client). + * + * @returns {Promise} Whether failover history UI is enabled. + */ +export default async function failoverHistoryEnabled(): Promise { + return process.env.CADENCE_FAILOVER_HISTORY_ENABLED === 'true'; +} diff --git a/src/config/dynamic/resolvers/schemas/resolver-schemas.ts b/src/config/dynamic/resolvers/schemas/resolver-schemas.ts index 41c31d7d6..7f36b9b98 100644 --- a/src/config/dynamic/resolvers/schemas/resolver-schemas.ts +++ b/src/config/dynamic/resolvers/schemas/resolver-schemas.ts @@ -63,6 +63,10 @@ const resolverSchemas: ResolverSchemas = { args: z.undefined(), returnType: z.boolean(), }, + FAILOVER_HISTORY_ENABLED: { + args: z.undefined(), + returnType: z.boolean(), + }, }; export default resolverSchemas; diff --git a/src/utils/config/__fixtures__/resolved-config-values.ts b/src/utils/config/__fixtures__/resolved-config-values.ts index 529754233..6fd54f813 100644 --- a/src/utils/config/__fixtures__/resolved-config-values.ts +++ b/src/utils/config/__fixtures__/resolved-config-values.ts @@ -42,5 +42,6 @@ const mockResolvedConfigValues: LoadedConfigResolvedValues = { }, WORKFLOW_DIAGNOSTICS_ENABLED: false, ARCHIVAL_DEFAULT_SEARCH_ENABLED: false, + FAILOVER_HISTORY_ENABLED: false, }; export default mockResolvedConfigValues; From ccf736d8ebe54cffb623761e362bc34d290ca29e 
Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Thu, 13 Nov 2025 15:31:09 +0100 Subject: [PATCH 33/58] Add failover history tab (#1071) Signed-off-by: Adhitya Mamallan --- .../config/domain-page-tabs.config.ts | 20 ++- .../domain-page-failovers.tsx | 8 + .../__tests__/domain-page-tabs.test.tsx | 155 +++++++++++++----- .../domain-page-tabs/domain-page-tabs.tsx | 25 ++- 4 files changed, 166 insertions(+), 42 deletions(-) create mode 100644 src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx diff --git a/src/views/domain-page/config/domain-page-tabs.config.ts b/src/views/domain-page/config/domain-page-tabs.config.ts index 10954c6fd..a18070116 100644 --- a/src/views/domain-page/config/domain-page-tabs.config.ts +++ b/src/views/domain-page/config/domain-page-tabs.config.ts @@ -1,14 +1,21 @@ -import { MdArchive, MdListAlt, MdSettings, MdSort } from 'react-icons/md'; +import { + MdArchive, + MdListAlt, + MdSettings, + MdSort, + MdSyncAlt, +} from 'react-icons/md'; import DomainWorkflows from '@/views/domain-workflows/domain-workflows'; import DomainWorkflowsArchival from '@/views/domain-workflows-archival/domain-workflows-archival'; +import DomainPageFailovers from '../domain-page-failovers/domain-page-failovers'; import DomainPageMetadata from '../domain-page-metadata/domain-page-metadata'; import DomainPageSettings from '../domain-page-settings/domain-page-settings'; import type { DomainPageTabsConfig } from '../domain-page-tabs/domain-page-tabs.types'; const domainPageTabsConfig: DomainPageTabsConfig< - 'workflows' | 'metadata' | 'settings' | 'archival' + 'workflows' | 'metadata' | 'failovers' | 'settings' | 'archival' > = { workflows: { title: 'Workflows', @@ -28,6 +35,15 @@ const domainPageTabsConfig: DomainPageTabsConfig< actions: [{ kind: 'retry', label: 'Retry' }], }), }, + failovers: { + title: 'Failovers', + artwork: MdSyncAlt, + content: DomainPageFailovers, + getErrorConfig: () => ({ + message: 'Failed to load failovers', + actions: [{ 
kind: 'retry', label: 'Retry' }], + }), + }, settings: { title: 'Settings', artwork: MdSettings, diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx b/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx new file mode 100644 index 000000000..019602f20 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx @@ -0,0 +1,8 @@ +'use client'; +import React from 'react'; + +import { type DomainPageTabContentProps } from '../domain-page-content/domain-page-content.types'; + +export default function DomainPageFailovers(_: DomainPageTabContentProps) { + return
WIP: Domain Page Failovers Tab
; +} diff --git a/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx b/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx index 54097daae..f6dce046b 100644 --- a/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx +++ b/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx @@ -1,8 +1,12 @@ -import React from 'react'; +import React, { Suspense } from 'react'; -import { render, screen, act, fireEvent } from '@/test-utils/rtl'; +import { HttpResponse } from 'msw'; + +import { render, screen, userEvent } from '@/test-utils/rtl'; + +import ErrorBoundary from '@/components/error-boundary/error-boundary'; +import { type GetConfigResponse } from '@/route-handlers/get-config/get-config.types'; -import domainPageTabsConfig from '../../config/domain-page-tabs.config'; import DomainPageTabs from '../domain-page-tabs'; const mockPushFn = jest.fn(); @@ -36,8 +40,21 @@ jest.mock('../../config/domain-page-tabs.config', () => ({ title: 'Workflows', artwork: () =>
, }, - 'page-2': { - title: 'Page 2', + metadata: { + title: 'Metadata', + artwork: () =>
, + }, + failovers: { + title: 'Failovers', + artwork: () =>
, + }, + settings: { + title: 'Settings', + artwork: () =>
, + }, + archival: { + title: 'Archival', + artwork: () =>
, }, })); @@ -45,31 +62,41 @@ jest.mock('../../domain-page-help/domain-page-help', () => jest.fn(() => ) ); -describe('DomainPageTabs', () => { +describe(DomainPageTabs.name, () => { afterEach(() => { jest.clearAllMocks(); }); - it('renders tabs titles correctly', () => { - render(); + it('renders tabs titles correctly with failover history disabled', async () => { + await setup({ enableFailoverHistory: false }); - Object.values(domainPageTabsConfig).forEach(({ title }) => { - expect(screen.getByText(title)).toBeInTheDocument(); - }); + expect(screen.getByText('Workflows')).toBeInTheDocument(); + expect(screen.getByText('Metadata')).toBeInTheDocument(); + expect(screen.getByText('Settings')).toBeInTheDocument(); + expect(screen.getByText('Archival')).toBeInTheDocument(); + expect(screen.queryByText('Failovers')).toBeNull(); }); - it('reroutes when new tab is clicked', () => { - render(); + it('renders tabs with failover history enabled', async () => { + await setup({ enableFailoverHistory: true }); - const page2Tab = screen.getByText('Page 2'); - act(() => { - fireEvent.click(page2Tab); - }); + expect(screen.getByText('Workflows')).toBeInTheDocument(); + expect(screen.getByText('Metadata')).toBeInTheDocument(); + expect(screen.getByText('Failovers')).toBeInTheDocument(); + expect(screen.getByText('Settings')).toBeInTheDocument(); + expect(screen.getByText('Archival')).toBeInTheDocument(); + }); - expect(mockPushFn).toHaveBeenCalledWith('page-2'); + it('reroutes when new tab is clicked', async () => { + const { user } = await setup({ enableFailoverHistory: false }); + + const metadataTab = await screen.findByText('Metadata'); + await user.click(metadataTab); + + expect(mockPushFn).toHaveBeenCalledWith('metadata'); }); - it('retains query params when new tab is clicked', () => { + it('retains query params when new tab is clicked', async () => { // TODO: this is a bit hacky, see if there is a better way to mock the window search property const originalWindow = window; 
window = Object.create(window); @@ -81,41 +108,95 @@ describe('DomainPageTabs', () => { writable: true, }); - render(); + const { user } = await setup({ enableFailoverHistory: false }); - const page2Tab = screen.getByText('Page 2'); - act(() => { - fireEvent.click(page2Tab); - }); + const metadataTab = await screen.findByText('Metadata'); + await user.click(metadataTab); expect(mockPushFn).toHaveBeenCalledWith( - 'page-2?queryParam1=one&queryParam2=two' + 'metadata?queryParam1=one&queryParam2=two' ); window = originalWindow; }); - it('renders tabs artworks correctly', () => { - render(); + it('renders tabs artworks correctly', async () => { + await setup({ enableFailoverHistory: false }); - Object.entries(domainPageTabsConfig).forEach(([key, { artwork }]) => { - if (typeof artwork !== 'undefined') - expect(screen.getByTestId(`${key}-artwork`)).toBeInTheDocument(); - else - expect(screen.queryByTestId(`${key}-artwork`)).not.toBeInTheDocument(); - }); + expect(screen.getByTestId('workflows-artwork')).toBeInTheDocument(); + expect(screen.getByTestId('metadata-artwork')).toBeInTheDocument(); + expect(screen.getByTestId('settings-artwork')).toBeInTheDocument(); + expect(screen.getByTestId('archival-artwork')).toBeInTheDocument(); + expect(screen.queryByTestId('failovers-artwork')).toBeNull(); }); - it('renders the help button as endEnhancer', () => { - render(); + it('handles errors gracefully', async () => { + await setup({ error: true }); + + expect( + await screen.findByText('Error: Failed to fetch config') + ).toBeInTheDocument(); + }); + + it('renders the help button as endEnhancer', async () => { + await setup({}); expect(screen.getByTestId('domain-page-help')).toBeInTheDocument(); expect(screen.getByText('Help Button')).toBeInTheDocument(); }); - it('renders the start workflow button', () => { - render(); + it('renders the start workflow button', async () => { + await setup({}); expect(screen.getByTestId('start-workflow-button')).toBeInTheDocument(); }); }); + 
+async function setup({ + error, + enableFailoverHistory, +}: { + error?: boolean; + enableFailoverHistory?: boolean; +}) { + const user = userEvent.setup(); + + render( +
Error: {error.message}
} + > + Loading...
}> + + + , + { + endpointsMocks: [ + { + path: '/api/config', + httpMethod: 'GET', + mockOnce: false, + httpResolver: async () => { + if (error) { + return HttpResponse.json( + { message: 'Failed to fetch config' }, + { status: 500 } + ); + } else { + return HttpResponse.json( + (enableFailoverHistory ?? + false) satisfies GetConfigResponse<'FAILOVER_HISTORY_ENABLED'> + ); + } + }, + }, + ], + } + ); + + if (!error) { + // Wait for the first tab to load + await screen.findByText('Workflows'); + } + + return { user }; +} diff --git a/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx b/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx index 242773776..98eaaf0c5 100644 --- a/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx +++ b/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx @@ -1,9 +1,11 @@ 'use client'; import React, { useMemo } from 'react'; +import omit from 'lodash/omit'; import { useRouter, useParams } from 'next/navigation'; import PageTabs from '@/components/page-tabs/page-tabs'; +import useSuspenseConfigValue from '@/hooks/use-config-value/use-suspense-config-value'; import decodeUrlParams from '@/utils/decode-url-params'; import domainPageTabsConfig from '../config/domain-page-tabs.config'; @@ -11,21 +13,38 @@ import DomainPageHelp from '../domain-page-help/domain-page-help'; import DomainPageStartWorkflowButton from '../domain-page-start-workflow-button/domain-page-start-workflow-button'; import { styled } from './domain-page-tabs.styles'; -import type { DomainPageTabsParams } from './domain-page-tabs.types'; +import { + type DomainPageTabName, + type DomainPageTabsParams, +} from './domain-page-tabs.types'; export default function DomainPageTabs() { const router = useRouter(); const params = useParams(); const decodedParams = decodeUrlParams(params) as DomainPageTabsParams; + const { data: isFailoverHistoryEnabled } = useSuspenseConfigValue( + 'FAILOVER_HISTORY_ENABLED' + ); + + const tabsConfig = useMemo>(() => 
{ + const tabsToHide: Array = []; + + if (!isFailoverHistoryEnabled) { + tabsToHide.push('failovers'); + } + + return omit(domainPageTabsConfig, tabsToHide); + }, [isFailoverHistoryEnabled]); + const tabList = useMemo( () => - Object.entries(domainPageTabsConfig).map(([key, tabConfig]) => ({ + Object.entries(tabsConfig).map(([key, tabConfig]) => ({ key, title: tabConfig.title, artwork: tabConfig.artwork, })), - [] + [tabsConfig] ); return ( From 20eb80376ddf63ba5ee8d00f782bb5de222c2f74 Mon Sep 17 00:00:00 2001 From: Tim Chan Date: Thu, 13 Nov 2025 14:24:04 -0800 Subject: [PATCH 34/58] chore: Added ts-node to dev-deps (#1074) Signed-off-by: Tim Chan --- package-lock.json | 77 +++++++++++++++++++++++------------------------ package.json | 1 + 2 files changed, 39 insertions(+), 39 deletions(-) diff --git a/package-lock.json b/package-lock.json index 3447564b2..29c038555 100644 --- a/package-lock.json +++ b/package-lock.json @@ -76,6 +76,7 @@ "prettier": "3.2.5", "styletron-engine-snapshot": "^1.0.2", "ts-jest": "^29.1.2", + "ts-node": "^10.9.2", "tstyche": "^3.0.0", "tsx": "^4.19.3", "typescript": "^5.3.3", @@ -142,6 +143,7 @@ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.4.tgz", "integrity": "sha512-MBVlMXP+kkl5394RBLSxxk/iLTeVGuXTV3cIDXavPpMMqnSnt6apKgan/U8O3USWZCWZT/TbgfEpKa4uMgN4Dg==", "dev": true, + "peer": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.24.2", @@ -765,8 +767,6 @@ "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", "dev": true, - "optional": true, - "peer": true, "dependencies": { "@jridgewell/trace-mapping": "0.3.9" }, @@ -779,8 +779,6 @@ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", "integrity": 
"sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", "dev": true, - "optional": true, - "peer": true, "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" @@ -1764,6 +1762,7 @@ "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", "dev": true, + "peer": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/expect": "^29.7.0", @@ -2304,6 +2303,7 @@ "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=8.0.0" } @@ -3072,6 +3072,7 @@ "version": "5.51.1", "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.51.1.tgz", "integrity": "sha512-s47HKFnQ4HOJAHoIiXcpna/roMMPZJPy6fJ6p4ZNVn8+/onlLBEDd1+xc8OnDuwgvecqkZD7Z2mnSRbcWefrKw==", + "peer": true, "dependencies": { "@tanstack/query-core": "5.51.1" }, @@ -3224,33 +3225,25 @@ "version": "1.0.11", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz", "integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/@tsconfig/node12": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/@tsconfig/node14": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", "integrity": 
"sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/@tsconfig/node16": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/@types/aria-query": { "version": "5.0.4", @@ -3404,6 +3397,7 @@ "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz", "integrity": "sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==", "dev": true, + "peer": true, "dependencies": { "expect": "^29.0.0", "pretty-format": "^29.0.0" @@ -3497,6 +3491,7 @@ "version": "20.14.10", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz", "integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==", + "peer": true, "dependencies": { "undici-types": "~5.26.4" } @@ -3510,6 +3505,7 @@ "version": "18.2.64", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.64.tgz", "integrity": "sha512-MlmPvHgjj2p3vZaxbQgFUQFvD8QiZwACfGqEdDSWou5yISWxDQ4/74nCAwsUiX7UFLKZz3BbVSPj+YxeoGGCfg==", + "peer": true, "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -3619,6 +3615,7 @@ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.13.0.tgz", "integrity": "sha512-EjMfl69KOS9awXXe83iRN7oIEXy9yYdqWfqdrFAYAAr6syP8eLEFI7ZE4939antx2mNgPRW/o1ybm2SFYkbTVA==", "dev": true, + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "7.13.0", "@typescript-eslint/types": "7.13.0", @@ -3818,6 +3815,7 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", 
"license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -3956,9 +3954,7 @@ "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/argparse": { "version": "2.0.1", @@ -4490,6 +4486,7 @@ "url": "https://github.com/sponsors/ai" } ], + "peer": true, "dependencies": { "caniuse-lite": "^1.0.30001587", "electron-to-chromium": "^1.4.668", @@ -4918,9 +4915,7 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/credit-card-type": { "version": "8.3.0", @@ -5227,7 +5222,8 @@ "node_modules/d3-selection": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-2.0.0.tgz", - "integrity": "sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA==" + "integrity": "sha512-XoGGqhLUN/W14NmaqcO/bb1nqjDAw5WtSYb2X8wiuQWvSZUsUVYsOSkOybUrNvcBjaywBdYPy03eXHMXjk9nZA==", + "peer": true }, "node_modules/d3-shape": { "version": "2.1.0", @@ -5309,6 +5305,7 @@ "version": "2.30.0", "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz", "integrity": "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==", + "peer": true, "dependencies": { "@babel/runtime": "^7.21.0" }, @@ -5503,8 +5500,6 @@ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", "dev": true, - "optional": true, - "peer": true, "engines": { "node": ">=0.3.1" } @@ -5591,7 +5586,6 @@ "version": "1.1.2", "resolved": 
"https://registry.npmjs.org/emitter-component/-/emitter-component-1.1.2.tgz", "integrity": "sha512-QdXO3nXOzZB4pAjM0n6ZE+R9/+kPpECA/XSELIcc54NeYVnBqIk+4DFiBgK+8QbV3mdvTG6nedl7dTYgO+5wDw==", - "peer": true, "funding": { "url": "https://github.com/sponsors/sindresorhus" } @@ -5892,6 +5886,7 @@ "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -6100,6 +6095,7 @@ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, + "peer": true, "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -6195,6 +6191,7 @@ "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", "dev": true, + "peer": true, "dependencies": { "array-includes": "^3.1.7", "array.prototype.findlastindex": "^1.2.3", @@ -8028,6 +8025,7 @@ "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", "dev": true, + "peer": true, "dependencies": { "@jest/core": "^29.7.0", "@jest/types": "^29.6.3", @@ -8368,6 +8366,7 @@ "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-29.7.0.tgz", "integrity": "sha512-k9iQbsf9OyOfdzWH8HDmrRT0gSIcX+FLNW7IQq94tFX0gynPwqDTW0Ho6iMVNjGz/nb+l/vW3dWM2bbLLpkbXA==", "dev": true, + "peer": true, "dependencies": { "@jest/environment": "^29.7.0", "@jest/fake-timers": "^29.7.0", @@ -9128,8 +9127,7 @@ "node_modules/keycharm": { "version": "0.2.0", "resolved": 
"https://registry.npmjs.org/keycharm/-/keycharm-0.2.0.tgz", - "integrity": "sha512-i/XBRTiLqRConPKioy2oq45vbv04e8x59b0mnsIRQM+7Ec/8BC7UcL5pnC4FMeGb8KwG7q4wOMw7CtNZf5tiIg==", - "peer": true + "integrity": "sha512-i/XBRTiLqRConPKioy2oq45vbv04e8x59b0mnsIRQM+7Ec/8BC7UcL5pnC4FMeGb8KwG7q4wOMw7CtNZf5tiIg==" }, "node_modules/keyv": { "version": "4.5.4", @@ -9310,6 +9308,7 @@ "version": "1.13.3", "resolved": "https://registry.npmjs.org/mapbox-gl/-/mapbox-gl-1.13.3.tgz", "integrity": "sha512-p8lJFEiqmEQlyv+DQxFAOG/XPWN0Wp7j/Psq93Zywz7qt9CcUKFYDBOoOEKzqe6gudHVJY8/Bhqw6VDpX2lSBg==", + "peer": true, "dependencies": { "@mapbox/geojson-rewind": "^0.5.2", "@mapbox/geojson-types": "^1.0.2", @@ -10322,6 +10321,7 @@ "version": "2.30.1", "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==", + "peer": true, "engines": { "node": "*" } @@ -10430,6 +10430,7 @@ "resolved": "https://registry.npmjs.org/next/-/next-14.2.31.tgz", "integrity": "sha512-Wyw1m4t8PhqG+or5a1U/Deb888YApC4rAez9bGhHkTsfwAy4SWKVro0GhEx4sox1856IbLhvhce2hAA6o8vkog==", "license": "MIT", + "peer": true, "dependencies": { "@next/env": "14.2.31", "@swc/helpers": "0.5.5", @@ -11122,6 +11123,7 @@ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", "dev": true, + "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -11200,6 +11202,7 @@ "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "peer": true, "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", @@ -11233,7 +11236,6 @@ "resolved": "https://registry.npmjs.org/propagating-hammerjs/-/propagating-hammerjs-1.5.0.tgz", "integrity": 
"sha512-3PUXWmomwutoZfydC+lJwK1bKCh6sK6jZGB31RUX6+4EXzsbkDZrK4/sVR7gBrvJaEIwpTVyxQUAd29FKkmVdw==", "license": "MIT", - "peer": true, "dependencies": { "hammerjs": "^2.0.8" } @@ -11385,6 +11387,7 @@ "version": "18.2.0", "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "peer": true, "dependencies": { "loose-envify": "^1.1.0" }, @@ -11407,6 +11410,7 @@ "version": "18.2.0", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "peer": true, "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.0" @@ -11469,6 +11473,7 @@ "version": "7.52.0", "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.52.0.tgz", "integrity": "sha512-mJX506Xc6mirzLsmXUJyqlAI3Kj9Ph2RhplYhUVffeOQSnubK2uVqBFOBJmvKikvbFV91pxVXmDiR+QMF19x6A==", + "peer": true, "engines": { "node": ">=12.22.0" }, @@ -12688,6 +12693,7 @@ "version": "6.1.1", "resolved": "https://registry.npmjs.org/styletron-react/-/styletron-react-6.1.1.tgz", "integrity": "sha512-K04BwKZTrdRG/wR5BaFG8z0bFu1jkT2HAp0UP5ZeMAKW6Ix8J3yuROWLoLUMZafaRRQ9LjiLpIl65u75L7YZow==", + "peer": true, "dependencies": { "prop-types": "^15.6.0", "styletron-standard": "^3.1.0" @@ -12700,6 +12706,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/styletron-standard/-/styletron-standard-3.1.0.tgz", "integrity": "sha512-Cr2q0IFsag6OaIeD/LBNRuCxNTPa/WtTbKP1X3o50mDudN8FGwmD5h1sMJ/Bu5+mO/2NfrNAv9V9zUXn6lXXMA==", + "peer": true, "dependencies": { "@rtsao/csstype": "2.6.5-forked.0", "csstype": "^3.0.0", @@ -12984,7 +12991,6 @@ "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", "dev": true, - "optional": true, "peer": true, 
"dependencies": { "@cspotcode/source-map-support": "^0.8.0", @@ -13196,6 +13202,7 @@ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.2.tgz", "integrity": "sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==", "dev": true, + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -13467,9 +13474,7 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "dev": true, - "optional": true, - "peer": true + "dev": true }, "node_modules/v8-to-istanbul": { "version": "9.2.0", @@ -13527,7 +13532,6 @@ "integrity": "sha512-xmujDB2Dzf8T04rGFJ9OP4OA6zRVrz8R9hb0CVKryBrZRCljCga9JjSfgctA8S7wdZu7otDtUIwX4ZOgfV/57w==", "hasInstallScript": true, "license": "(Apache-2.0 OR MIT)", - "peer": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/visjs" @@ -13544,7 +13548,6 @@ "integrity": "sha512-8U1qOS3XppFMd9vHTuaejYeXdtMm057C+5PCIWf6c+uhevS+ECYqUrEYzHoYZurijViEVekxfNezi9Bc/szlpA==", "hasInstallScript": true, "license": "(Apache-2.0 OR MIT)", - "peer": true, "dependencies": { "emitter-component": "^1.1.1", "hammerjs": "^2.0.8", @@ -13560,7 +13563,6 @@ "resolved": "https://registry.npmjs.org/vis-util/-/vis-util-1.1.10.tgz", "integrity": "sha512-8hGSxsFi2ogYYweClQyITzWnirWgQ8p0i9M4d3OXMuUO8vjXrf+2zHOYI9OZbtUduxAWuMEePnS9BXDtPJmJ7Q==", "license": "(Apache-2.0 OR MIT)", - "peer": true, "dependencies": { "moment": "2.24.0", "vis-uuid": "1.1.3" @@ -13593,7 +13595,6 @@ "integrity": "sha512-2B6XdY1bkzbUh+TugmnAaFa61KO9R5pzBzIuFIm8a9FrkbxIdSmQXV+FbfkL8QunkQV/bT0JDLQ2puqCS2+0Og==", "deprecated": "We don't use this library anymore so you shouldn't either. Use e.g. 
'uuid' instead!", "license": "(Apache-2.0 OR MIT)", - "peer": true, "engines": { "node": ">=8" } @@ -14000,8 +14001,6 @@ "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", "dev": true, - "optional": true, - "peer": true, "engines": { "node": ">=6" } diff --git a/package.json b/package.json index 711ee0af6..b8209d283 100644 --- a/package.json +++ b/package.json @@ -95,6 +95,7 @@ "pino-pretty": "^11.2.2", "prettier": "3.2.5", "styletron-engine-snapshot": "^1.0.2", + "ts-node": "^10.9.2", "ts-jest": "^29.1.2", "tstyche": "^3.0.0", "tsx": "^4.19.3", From 7e44676f615930368ebdafb96b89b9ef5c9726b9 Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Fri, 14 Nov 2025 14:29:20 +0100 Subject: [PATCH 35/58] feat: Hook to fetch & filter Failover History of a domain (#1075) Add hook for fetching and filtering failover history Add helper function to check if a ClusterFailover matches a given cluster attribute Add some types/constants for domain page failovers Signed-off-by: Adhitya Mamallan --- .../list-failover-history.types.ts | 3 + .../domain-page-failovers.constants.ts | 12 + .../domain-page-failovers.types.ts | 3 + ...cluster-failover-matches-attribute.test.ts | 189 ++++++++++++ .../cluster-failover-matches-attribute.ts | 16 + .../use-domain-failover-history.test.ts | 276 ++++++++++++++++++ ...t-domain-failover-history-query-options.ts | 33 +++ .../use-domain-failover-history.ts | 44 +++ .../use-domain-failover-history.types.ts | 36 +++ 9 files changed, 612 insertions(+) create mode 100644 src/views/domain-page/domain-page-failovers/domain-page-failovers.constants.ts create mode 100644 src/views/domain-page/domain-page-failovers/domain-page-failovers.types.ts create mode 100644 src/views/domain-page/helpers/__tests__/cluster-failover-matches-attribute.test.ts create mode 100644 src/views/domain-page/helpers/cluster-failover-matches-attribute.ts create 
mode 100644 src/views/domain-page/hooks/use-domain-failover-history/__tests__/use-domain-failover-history.test.ts create mode 100644 src/views/domain-page/hooks/use-domain-failover-history/get-domain-failover-history-query-options.ts create mode 100644 src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.ts create mode 100644 src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.types.ts diff --git a/src/route-handlers/list-failover-history/list-failover-history.types.ts b/src/route-handlers/list-failover-history/list-failover-history.types.ts index 5f2815816..1cce5d70a 100644 --- a/src/route-handlers/list-failover-history/list-failover-history.types.ts +++ b/src/route-handlers/list-failover-history/list-failover-history.types.ts @@ -1,5 +1,6 @@ import { type z } from 'zod'; +import { type FailoverEvent as FailoverEventProto } from '@/__generated__/proto-ts/uber/cadence/api/v1/FailoverEvent'; import { type ListFailoverHistoryResponse as ListFailoverHistoryResponseProto } from '@/__generated__/proto-ts/uber/cadence/api/v1/ListFailoverHistoryResponse'; import { type DefaultMiddlewaresContext } from '@/utils/route-handlers-middleware'; @@ -20,4 +21,6 @@ export type ListFailoverHistoryRequestQueryParams = z.input< export type ListFailoverHistoryResponse = ListFailoverHistoryResponseProto; +export type FailoverEvent = FailoverEventProto; + export type Context = DefaultMiddlewaresContext; diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.constants.ts b/src/views/domain-page/domain-page-failovers/domain-page-failovers.constants.ts new file mode 100644 index 000000000..4dcb51dc2 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers/domain-page-failovers.constants.ts @@ -0,0 +1,12 @@ +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; + +export const PRIMARY_CLUSTER_SCOPE = 'primary'; + +export const 
FAILOVER_TYPE_LABEL_MAP: Record< + FailoverEvent['failoverType'], + string +> = { + FAILOVER_TYPE_INVALID: 'Invalid', + FAILOVER_TYPE_FORCE: 'Force', + FAILOVER_TYPE_GRACEFUL: 'Graceful', +}; diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.types.ts b/src/views/domain-page/domain-page-failovers/domain-page-failovers.types.ts new file mode 100644 index 000000000..e996ab191 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers/domain-page-failovers.types.ts @@ -0,0 +1,3 @@ +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; + +export type ClusterFailover = FailoverEvent['clusterFailovers'][number]; diff --git a/src/views/domain-page/helpers/__tests__/cluster-failover-matches-attribute.test.ts b/src/views/domain-page/helpers/__tests__/cluster-failover-matches-attribute.test.ts new file mode 100644 index 000000000..efd0066ca --- /dev/null +++ b/src/views/domain-page/helpers/__tests__/cluster-failover-matches-attribute.test.ts @@ -0,0 +1,189 @@ +import { PRIMARY_CLUSTER_SCOPE } from '../../domain-page-failovers/domain-page-failovers.constants'; +import { type ClusterFailover } from '../../domain-page-failovers/domain-page-failovers.types'; +import clusterFailoverMatchesAttribute from '../cluster-failover-matches-attribute'; + +describe(clusterFailoverMatchesAttribute.name, () => { + it('should return true when clusterAttribute is null and scope is PRIMARY_CLUSTER_SCOPE', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: null, + }; + + expect( + clusterFailoverMatchesAttribute(clusterFailover, PRIMARY_CLUSTER_SCOPE) + ).toBe(true); + }); + + it('should return false when clusterAttribute is null and scope is not PRIMARY_CLUSTER_SCOPE', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { 
+ activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: null, + }; + + expect( + clusterFailoverMatchesAttribute(clusterFailover, 'other-scope') + ).toBe(false); + }); + + it('should return false when clusterAttribute is null and scope is undefined', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: null, + }; + + expect(clusterFailoverMatchesAttribute(clusterFailover)).toBe(false); + }); + + it('should return true when clusterAttribute scope matches and no value is provided', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }; + + expect(clusterFailoverMatchesAttribute(clusterFailover, 'city')).toBe(true); + }); + + it('should return false when clusterAttribute scope does not match and no value is provided', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }; + + expect( + clusterFailoverMatchesAttribute(clusterFailover, 'other-scope') + ).toBe(false); + }); + + it('should return true when clusterAttribute scope and name both match', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }; + + expect( + 
clusterFailoverMatchesAttribute(clusterFailover, 'city', 'new_york') + ).toBe(true); + }); + + it('should return false when clusterAttribute scope matches but name does not match', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }; + + expect( + clusterFailoverMatchesAttribute(clusterFailover, 'city', 'san_francisco') + ).toBe(false); + }); + + it('should return false when clusterAttribute scope does not match even if name matches', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }; + + expect( + clusterFailoverMatchesAttribute( + clusterFailover, + 'other-scope', + 'new_york' + ) + ).toBe(false); + }); + + it('should return false when clusterAttribute scope does not match and value is provided', () => { + const clusterFailover: ClusterFailover = { + fromCluster: { + activeClusterName: 'cluster1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }; + + expect( + clusterFailoverMatchesAttribute( + clusterFailover, + 'other-scope', + 'san_francisco' + ) + ).toBe(false); + }); +}); diff --git a/src/views/domain-page/helpers/cluster-failover-matches-attribute.ts b/src/views/domain-page/helpers/cluster-failover-matches-attribute.ts new file mode 100644 index 000000000..9f15fda8b --- /dev/null +++ b/src/views/domain-page/helpers/cluster-failover-matches-attribute.ts @@ -0,0 +1,16 @@ +import { PRIMARY_CLUSTER_SCOPE } from '../domain-page-failovers/domain-page-failovers.constants'; +import { type 
ClusterFailover } from '../domain-page-failovers/domain-page-failovers.types'; + +export default function clusterFailoverMatchesAttribute( + clusterFailover: ClusterFailover, + scope?: string, + value?: string +) { + const attribute = clusterFailover.clusterAttribute; + if (attribute === null) return scope === PRIMARY_CLUSTER_SCOPE; + + const scopeMatches = attribute.scope === scope; + if (!value) return scopeMatches; + + return scopeMatches && attribute.name === value; +} diff --git a/src/views/domain-page/hooks/use-domain-failover-history/__tests__/use-domain-failover-history.test.ts b/src/views/domain-page/hooks/use-domain-failover-history/__tests__/use-domain-failover-history.test.ts new file mode 100644 index 000000000..b7bb770cf --- /dev/null +++ b/src/views/domain-page/hooks/use-domain-failover-history/__tests__/use-domain-failover-history.test.ts @@ -0,0 +1,276 @@ +import { HttpResponse } from 'msw'; +import { act } from 'react-dom/test-utils'; + +import { renderHook, waitFor } from '@/test-utils/rtl'; + +import { type ListFailoverHistoryResponse } from '@/route-handlers/list-failover-history/list-failover-history.types'; +import { type ClusterFailover } from '@/views/domain-page/domain-page-failovers/domain-page-failovers.types'; + +import * as clusterFailoverMatchesAttributeModule from '../../../helpers/cluster-failover-matches-attribute'; +import useDomainFailoverHistory from '../use-domain-failover-history'; + +jest.mock('../../../helpers/cluster-failover-matches-attribute', () => + jest.fn(() => true) +); + +describe(useDomainFailoverHistory.name, () => { + it('should return loading state initially', () => { + const { result } = setup({}); + + expect(result.current.isLoading).toBe(true); + expect(result.current.allFailoverEvents).toEqual([]); + expect(result.current.filteredFailoverEvents).toEqual([]); + }); + + it('should return failover events when data is loaded', async () => { + const mockFailoverEvents = createMockFailoverEvents(2); + const { 
result } = setup({ + failoverResponse: { + failoverEvents: mockFailoverEvents, + nextPageToken: '', + }, + }); + + await waitFor(() => { + expect(result.current.isLoading).toBe(false); + }); + + expect(result.current.allFailoverEvents).toEqual(mockFailoverEvents); + expect(result.current.filteredFailoverEvents).toEqual(mockFailoverEvents); + }); + + it('should return empty arrays when no failover events are returned', async () => { + const { result } = setup({ + failoverResponse: { + failoverEvents: [], + nextPageToken: '', + }, + }); + + await waitFor(() => { + expect(result.current.isLoading).toBe(false); + }); + + expect(result.current.allFailoverEvents).toEqual([]); + expect(result.current.filteredFailoverEvents).toEqual([]); + }); + + it('should filter failover events when clusterAttributeScope is provided', async () => { + const mockFailoverEvents = createMockFailoverEvents(3); + const clusterFailover1 = mockFailoverEvents[0].clusterFailovers[0]; + const clusterFailover2 = mockFailoverEvents[1].clusterFailovers[0]; + const clusterFailover3 = mockFailoverEvents[2].clusterFailovers[0]; + + const { result, clusterFailoverMatchesAttributeSpy } = setup({ + failoverResponse: { + failoverEvents: mockFailoverEvents, + nextPageToken: '', + }, + clusterAttributeScope: 'scope-0', + clusterAttributeValue: 'name-0', + }); + + clusterFailoverMatchesAttributeSpy.mockImplementation( + (clusterFailover: ClusterFailover) => { + return ( + clusterFailover.clusterAttribute?.scope === 'scope-0' && + clusterFailover.clusterAttribute?.name === 'name-0' + ); + } + ); + + await waitFor(() => { + expect(result.current.isLoading).toBe(false); + }); + + expect(result.current.allFailoverEvents).toEqual(mockFailoverEvents); + expect(result.current.filteredFailoverEvents).toEqual([ + mockFailoverEvents[0], + ]); + + expect(clusterFailoverMatchesAttributeSpy).toHaveBeenCalledTimes(3); + expect(clusterFailoverMatchesAttributeSpy).toHaveBeenCalledWith( + clusterFailover1, + 'scope-0', + 
'name-0' + ); + expect(clusterFailoverMatchesAttributeSpy).toHaveBeenCalledWith( + clusterFailover2, + 'scope-0', + 'name-0' + ); + expect(clusterFailoverMatchesAttributeSpy).toHaveBeenCalledWith( + clusterFailover3, + 'scope-0', + 'name-0' + ); + }); + + it('should return all failover events when clusterAttributeScope is not provided', async () => { + const mockFailoverEvents = createMockFailoverEvents(3); + + const { result, clusterFailoverMatchesAttributeSpy } = setup({ + failoverResponse: { + failoverEvents: mockFailoverEvents, + nextPageToken: '', + }, + }); + + await waitFor(() => { + expect(result.current.isLoading).toBe(false); + }); + + expect(result.current.allFailoverEvents).toEqual(mockFailoverEvents); + expect(result.current.filteredFailoverEvents).toEqual(mockFailoverEvents); + expect(clusterFailoverMatchesAttributeSpy).not.toHaveBeenCalled(); + }); + + it('should handle multiple pages of failover events', async () => { + const firstPageEvents = createMockFailoverEvents(2); + const secondPageEvents = createMockFailoverEvents(2, 2); + + let pageCount = 0; + const { result } = setup({ + failoverResponse: () => { + pageCount++; + if (pageCount === 1) { + return { + failoverEvents: firstPageEvents, + nextPageToken: 'token-1', + }; + } + return { + failoverEvents: secondPageEvents, + nextPageToken: '', + }; + }, + }); + + await waitFor(() => { + expect(result.current.isLoading).toBe(false); + }); + + expect(result.current.allFailoverEvents).toEqual(firstPageEvents); + + act(() => { + result.current.fetchNextPage(); + }); + + await waitFor(() => { + expect(result.current.isFetchingNextPage).toBe(false); + }); + + expect(result.current.allFailoverEvents).toEqual([ + ...firstPageEvents, + ...secondPageEvents, + ]); + }); + + it('should handle API errors', async () => { + const { result } = setup({ error: true }); + + await waitFor(() => { + expect(result.current.status).toBe('error'); + }); + + expect(result.current.allFailoverEvents).toEqual([]); + 
expect(result.current.filteredFailoverEvents).toEqual([]); + }); +}); + +function setup({ + failoverResponse, + error = false, + clusterAttributeScope, + clusterAttributeValue, +}: { + failoverResponse?: + | ListFailoverHistoryResponse + | (() => ListFailoverHistoryResponse); + error?: boolean; + clusterAttributeScope?: string; + clusterAttributeValue?: string; +} = {}) { + const clusterFailoverMatchesAttributeSpy = jest.spyOn( + clusterFailoverMatchesAttributeModule, + 'default' + ); + + clusterFailoverMatchesAttributeSpy.mockReturnValue(true); + + const defaultResponse: ListFailoverHistoryResponse = { + failoverEvents: [], + nextPageToken: '', + }; + + return { + ...renderHook( + () => + useDomainFailoverHistory({ + domainName: 'test-domain', + domainId: 'test-domain-id', + cluster: 'test-cluster', + clusterAttributeScope, + clusterAttributeValue, + }), + { + endpointsMocks: [ + { + path: '/api/domains/:domain/:cluster/failovers', + httpMethod: 'GET', + mockOnce: false, + httpResolver: async () => { + if (error) { + return HttpResponse.json( + { message: 'Failed to fetch failover history' }, + { status: 500 } + ); + } + + const response = + typeof failoverResponse === 'function' + ? failoverResponse() + : failoverResponse ?? 
defaultResponse; + + return HttpResponse.json(response); + }, + }, + ], + } + ), + clusterFailoverMatchesAttributeSpy, + }; +} + +function createMockFailoverEvents( + count: number, + startIndex = 0 +): ListFailoverHistoryResponse['failoverEvents'] { + return Array.from({ length: count }, (_, i) => + createMockFailoverEvent({ + clusterFailovers: [ + { + clusterAttribute: { + scope: `scope-${startIndex + i}`, + name: `name-${startIndex + i}`, + }, + }, + ], + }) + ); +} + +function createMockFailoverEvent({ + clusterFailovers = [], +}: { + clusterFailovers?: Array<{ + clusterAttribute: { scope: string; name: string } | null; + }>; +}): ListFailoverHistoryResponse['failoverEvents'][number] { + return { + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: clusterFailovers.map((cf) => ({ + clusterAttribute: cf.clusterAttribute, + })), + } as ListFailoverHistoryResponse['failoverEvents'][number]; +} diff --git a/src/views/domain-page/hooks/use-domain-failover-history/get-domain-failover-history-query-options.ts b/src/views/domain-page/hooks/use-domain-failover-history/get-domain-failover-history-query-options.ts new file mode 100644 index 000000000..3428e782c --- /dev/null +++ b/src/views/domain-page/hooks/use-domain-failover-history/get-domain-failover-history-query-options.ts @@ -0,0 +1,33 @@ +import queryString from 'query-string'; + +import { type ListFailoverHistoryRequestQueryParams } from '@/route-handlers/list-failover-history/list-failover-history.types'; +import request from '@/utils/request'; + +import { + type DomainFailoverHistoryQueryOptions, + type UseDomainFailoverHistoryReactQueryParams, +} from './use-domain-failover-history.types'; + +export default function getDomainFailoverHistoryQueryOptions( + params: UseDomainFailoverHistoryReactQueryParams +): DomainFailoverHistoryQueryOptions { + return { + queryKey: ['listFailoverHistory', params], + queryFn: async ({ + queryKey: [_, { domainName, domainId, cluster }], + pageParam, + }) => + request( 
+ queryString.stringifyUrl({ + url: `/api/domains/${domainName}/${cluster}/failovers`, + query: { + domainId, + nextPage: pageParam, + } as const satisfies ListFailoverHistoryRequestQueryParams, + }) + ).then((res) => res.json()), + initialPageParam: undefined, + getNextPageParam: (res) => res.nextPageToken, + retry: false, + }; +} diff --git a/src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.ts b/src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.ts new file mode 100644 index 000000000..2b8332732 --- /dev/null +++ b/src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.ts @@ -0,0 +1,44 @@ +import { useMemo } from 'react'; + +import { useInfiniteQuery } from '@tanstack/react-query'; + +import clusterFailoverMatchesAttribute from '../../helpers/cluster-failover-matches-attribute'; + +import getDomainFailoverHistoryQueryOptions from './get-domain-failover-history-query-options'; +import { type UseDomainFailoverHistoryParams } from './use-domain-failover-history.types'; + +export default function useDomainFailoverHistory( + params: UseDomainFailoverHistoryParams +) { + const { clusterAttributeScope, clusterAttributeValue, ...reactQueryParams } = + params; + + const queryResult = useInfiniteQuery( + getDomainFailoverHistoryQueryOptions(reactQueryParams) + ); + + const allFailoverEvents = useMemo(() => { + if (!queryResult.data) return []; + return queryResult.data.pages.flatMap((page) => page.failoverEvents ?? 
[]); + }, [queryResult.data]); + + const filteredFailoverEvents = useMemo(() => { + if (!clusterAttributeScope) return allFailoverEvents; + + return allFailoverEvents.filter((failover) => + failover.clusterFailovers.some((clusterFailover) => + clusterFailoverMatchesAttribute( + clusterFailover, + clusterAttributeScope, + clusterAttributeValue + ) + ) + ); + }, [allFailoverEvents, clusterAttributeScope, clusterAttributeValue]); + + return { + ...queryResult, + allFailoverEvents, + filteredFailoverEvents, + }; +} diff --git a/src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.types.ts b/src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.types.ts new file mode 100644 index 000000000..df5cd6c73 --- /dev/null +++ b/src/views/domain-page/hooks/use-domain-failover-history/use-domain-failover-history.types.ts @@ -0,0 +1,36 @@ +import { + type InfiniteData, + type UseInfiniteQueryOptions, +} from '@tanstack/react-query'; + +import { type ListFailoverHistoryResponse } from '@/route-handlers/list-failover-history/list-failover-history.types'; +import { type RequestError } from '@/utils/request/request-error'; + +export type UseDomainFailoverHistoryReactQueryParams = { + domainName: string; + domainId: string; + cluster: string; +}; + +export type UseDomainFailoverHistoryFilterParams = { + clusterAttributeScope?: string; + clusterAttributeValue?: string; +}; + +export type UseDomainFailoverHistoryParams = + UseDomainFailoverHistoryReactQueryParams & + UseDomainFailoverHistoryFilterParams; + +export type DomainFailoverHistoryQueryKey = [ + string, + UseDomainFailoverHistoryReactQueryParams, +]; + +export type DomainFailoverHistoryQueryOptions = UseInfiniteQueryOptions< + ListFailoverHistoryResponse, + RequestError, + InfiniteData, + ListFailoverHistoryResponse, + DomainFailoverHistoryQueryKey, + string | undefined +>; From 5cd9853d76826c7f255122faa800e47a296c1252 Mon Sep 17 00:00:00 2001 From: Tim Chan Date: Fri, 
14 Nov 2025 08:19:09 -0800 Subject: [PATCH 36/58] feat: New Cron Tab (#1078) Signed-off-by: Tim Chan --- .../domain-cron-list.styles.ts | 11 ++++++ .../domain-cron-list/domain-cron-list.tsx | 11 ++++++ .../config/domain-page-tabs.config.ts | 13 ++++++- .../__tests__/domain-page-tabs.test.tsx | 37 ++++++++++++------- .../domain-page-tabs/domain-page-tabs.tsx | 9 ++++- 5 files changed, 66 insertions(+), 15 deletions(-) create mode 100644 src/views/domain-cron-list/domain-cron-list.styles.ts create mode 100644 src/views/domain-cron-list/domain-cron-list.tsx diff --git a/src/views/domain-cron-list/domain-cron-list.styles.ts b/src/views/domain-cron-list/domain-cron-list.styles.ts new file mode 100644 index 000000000..726414066 --- /dev/null +++ b/src/views/domain-cron-list/domain-cron-list.styles.ts @@ -0,0 +1,11 @@ +import { styled as createStyled, type Theme } from 'baseui'; + +export const styled = { + DomainCronListContainer: createStyled( + 'div', + ({ $theme }: { $theme: Theme }) => ({ + marginTop: $theme.sizing.scale950, + marginBottom: $theme.sizing.scale900, + }) + ), +}; diff --git a/src/views/domain-cron-list/domain-cron-list.tsx b/src/views/domain-cron-list/domain-cron-list.tsx new file mode 100644 index 000000000..4fa900736 --- /dev/null +++ b/src/views/domain-cron-list/domain-cron-list.tsx @@ -0,0 +1,11 @@ +import React from 'react'; + +import { styled } from './domain-cron-list.styles'; + +export default function DomainCronList() { + return ( + + Cron List, Coming Soon! 
+ + ); +} diff --git a/src/views/domain-page/config/domain-page-tabs.config.ts b/src/views/domain-page/config/domain-page-tabs.config.ts index a18070116..449eb8639 100644 --- a/src/views/domain-page/config/domain-page-tabs.config.ts +++ b/src/views/domain-page/config/domain-page-tabs.config.ts @@ -4,8 +4,10 @@ import { MdSettings, MdSort, MdSyncAlt, + MdSchedule, } from 'react-icons/md'; +import DomainCronList from '@/views/domain-cron-list/domain-cron-list'; import DomainWorkflows from '@/views/domain-workflows/domain-workflows'; import DomainWorkflowsArchival from '@/views/domain-workflows-archival/domain-workflows-archival'; @@ -15,7 +17,7 @@ import DomainPageSettings from '../domain-page-settings/domain-page-settings'; import type { DomainPageTabsConfig } from '../domain-page-tabs/domain-page-tabs.types'; const domainPageTabsConfig: DomainPageTabsConfig< - 'workflows' | 'metadata' | 'failovers' | 'settings' | 'archival' + 'workflows' | 'cron-list' | 'metadata' | 'failovers' | 'settings' | 'archival' > = { workflows: { title: 'Workflows', @@ -26,6 +28,15 @@ const domainPageTabsConfig: DomainPageTabsConfig< actions: [{ kind: 'retry', label: 'Retry' }], }), }, + 'cron-list': { + title: 'Cron', + artwork: MdSchedule, + content: DomainCronList, + getErrorConfig: () => ({ + message: 'Failed to load cron list', + actions: [{ kind: 'retry', label: 'Retry' }], + }), + }, metadata: { title: 'Metadata', artwork: MdListAlt, diff --git a/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx b/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx index f6dce046b..247f98d0e 100644 --- a/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx +++ b/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx @@ -40,6 +40,10 @@ jest.mock('../../config/domain-page-tabs.config', () => ({ title: 'Workflows', artwork: () =>
, }, + 'cron-list': { + title: 'Cron', + artwork: () =>
, + }, metadata: { title: 'Metadata', artwork: () =>
, @@ -67,20 +71,26 @@ describe(DomainPageTabs.name, () => { jest.clearAllMocks(); }); - it('renders tabs titles correctly with failover history disabled', async () => { - await setup({ enableFailoverHistory: false }); + it('renders tabs titles correctly', async () => { + await setup(); expect(screen.getByText('Workflows')).toBeInTheDocument(); expect(screen.getByText('Metadata')).toBeInTheDocument(); expect(screen.getByText('Settings')).toBeInTheDocument(); expect(screen.getByText('Archival')).toBeInTheDocument(); + + expect(screen.queryByText('Cron')).toBeNull(); expect(screen.queryByText('Failovers')).toBeNull(); }); - it('renders tabs with failover history enabled', async () => { - await setup({ enableFailoverHistory: true }); + it('renders tabs with cron and failover history enabled', async () => { + await setup({ + enableFailoverHistory: true, + enableCronList: true, + }); expect(screen.getByText('Workflows')).toBeInTheDocument(); + expect(screen.getByText('Cron')).toBeInTheDocument(); expect(screen.getByText('Metadata')).toBeInTheDocument(); expect(screen.getByText('Failovers')).toBeInTheDocument(); expect(screen.getByText('Settings')).toBeInTheDocument(); @@ -88,7 +98,7 @@ describe(DomainPageTabs.name, () => { }); it('reroutes when new tab is clicked', async () => { - const { user } = await setup({ enableFailoverHistory: false }); + const { user } = await setup(); const metadataTab = await screen.findByText('Metadata'); await user.click(metadataTab); @@ -108,7 +118,7 @@ describe(DomainPageTabs.name, () => { writable: true, }); - const { user } = await setup({ enableFailoverHistory: false }); + const { user } = await setup(); const metadataTab = await screen.findByText('Metadata'); await user.click(metadataTab); @@ -121,7 +131,7 @@ describe(DomainPageTabs.name, () => { }); it('renders tabs artworks correctly', async () => { - await setup({ enableFailoverHistory: false }); + await setup(); expect(screen.getByTestId('workflows-artwork')).toBeInTheDocument(); 
expect(screen.getByTestId('metadata-artwork')).toBeInTheDocument(); @@ -152,13 +162,12 @@ describe(DomainPageTabs.name, () => { }); }); -async function setup({ - error, - enableFailoverHistory, -}: { +async function setup(opts?: { error?: boolean; enableFailoverHistory?: boolean; + enableCronList?: boolean; }) { + const { error, enableFailoverHistory, enableCronList } = opts ?? {}; const user = userEvent.setup(); render( @@ -183,8 +192,10 @@ async function setup({ ); } else { return HttpResponse.json( - (enableFailoverHistory ?? - false) satisfies GetConfigResponse<'FAILOVER_HISTORY_ENABLED'> + ((enableFailoverHistory ?? + false) satisfies GetConfigResponse<'FAILOVER_HISTORY_ENABLED'>) || + ((enableCronList ?? + false) satisfies GetConfigResponse<'CRON_LIST_ENABLED'>) ); } }, diff --git a/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx b/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx index 98eaaf0c5..2c661b3be 100644 --- a/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx +++ b/src/views/domain-page/domain-page-tabs/domain-page-tabs.tsx @@ -27,6 +27,9 @@ export default function DomainPageTabs() { 'FAILOVER_HISTORY_ENABLED' ); + const { data: isCronListEnabled } = + useSuspenseConfigValue('CRON_LIST_ENABLED'); + const tabsConfig = useMemo>(() => { const tabsToHide: Array = []; @@ -34,8 +37,12 @@ export default function DomainPageTabs() { tabsToHide.push('failovers'); } + if (!isCronListEnabled) { + tabsToHide.push('cron-list'); + } + return omit(domainPageTabsConfig, tabsToHide); - }, [isFailoverHistoryEnabled]); + }, [isFailoverHistoryEnabled, isCronListEnabled]); const tabList = useMemo( () => From 48b3d9248064487a3ec6120bbc8e31089e235809 Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Fri, 14 Nov 2025 18:28:53 +0100 Subject: [PATCH 37/58] feat: Failover History table (#1076) * Add table to display failover history of a domain, with support for both active-passive and active-active domains Signed-off-by: Adhitya Mamallan 
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../__fixtures__/domain-page-query-params.ts | 2 + ...ge-failovers-table-active-active.config.ts | 18 + .../domain-page-failovers-table.config.ts | 48 +++ .../config/domain-page-query-params.config.ts | 11 + ...omain-page-failover-active-active.test.tsx | 311 ++++++++++++++++++ ...main-page-failover-active-active.styles.ts | 26 ++ .../domain-page-failover-active-active.tsx | 76 +++++ ...omain-page-failover-active-active.types.ts | 5 + ...main-page-failover-single-cluster.test.tsx | 28 ++ ...ain-page-failover-single-cluster.styles.ts | 17 + .../domain-page-failover-single-cluster.tsx | 23 ++ ...main-page-failover-single-cluster.types.ts | 4 + .../__tests__/domain-page-failovers.test.tsx | 227 +++++++++++++ .../domain-page-failovers.styles.ts | 10 + .../domain-page-failovers.tsx | 69 +++- 15 files changed, 873 insertions(+), 2 deletions(-) create mode 100644 src/views/domain-page/config/domain-page-failovers-table-active-active.config.ts create mode 100644 src/views/domain-page/config/domain-page-failovers-table.config.ts create mode 100644 src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx create mode 100644 src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.styles.ts create mode 100644 src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx create mode 100644 src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.types.ts create mode 100644 src/views/domain-page/domain-page-failover-single-cluster/__tests__/domain-page-failover-single-cluster.test.tsx create mode 100644 src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.styles.ts create mode 100644 src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.tsx create mode 100644 
src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.types.ts create mode 100644 src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx create mode 100644 src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts diff --git a/src/views/domain-page/__fixtures__/domain-page-query-params.ts b/src/views/domain-page/__fixtures__/domain-page-query-params.ts index 993a1ad07..bc65c4601 100644 --- a/src/views/domain-page/__fixtures__/domain-page-query-params.ts +++ b/src/views/domain-page/__fixtures__/domain-page-query-params.ts @@ -23,4 +23,6 @@ export const mockDomainPageQueryParamsValues = { sortColumnArchival: 'startTime', sortOrderArchival: 'DESC', queryArchival: '', + clusterAttributeScope: undefined, + clusterAttributeValue: undefined, } as const satisfies PageQueryParamValues; diff --git a/src/views/domain-page/config/domain-page-failovers-table-active-active.config.ts b/src/views/domain-page/config/domain-page-failovers-table-active-active.config.ts new file mode 100644 index 000000000..8562a614a --- /dev/null +++ b/src/views/domain-page/config/domain-page-failovers-table-active-active.config.ts @@ -0,0 +1,18 @@ +import { createElement } from 'react'; + +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; + +import DomainPageFailoverActiveActive from '../domain-page-failover-active-active/domain-page-failover-active-active'; + +import domainPageFailoversTableConfig from './domain-page-failovers-table.config'; + +const domainPageFailoversTableActiveActiveConfig = [ + ...domainPageFailoversTableConfig.slice(0, 3), + { + ...domainPageFailoversTableConfig[3], + renderCell: (event: FailoverEvent) => + createElement(DomainPageFailoverActiveActive, { failoverEvent: event }), + }, +]; + +export default domainPageFailoversTableActiveActiveConfig; diff --git a/src/views/domain-page/config/domain-page-failovers-table.config.ts 
b/src/views/domain-page/config/domain-page-failovers-table.config.ts new file mode 100644 index 000000000..f3cf99ef7 --- /dev/null +++ b/src/views/domain-page/config/domain-page-failovers-table.config.ts @@ -0,0 +1,48 @@ +import { createElement } from 'react'; + +import FormattedDate from '@/components/formatted-date/formatted-date'; +import { type TableConfig } from '@/components/table/table.types'; +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; +import parseGrpcTimestamp from '@/utils/datetime/parse-grpc-timestamp'; + +import DomainPageFailoverSingleCluster from '../domain-page-failover-single-cluster/domain-page-failover-single-cluster'; +import { FAILOVER_TYPE_LABEL_MAP } from '../domain-page-failovers/domain-page-failovers.constants'; + +const domainPageFailoversTableConfig = [ + { + name: 'Failover ID', + id: 'failoverId', + width: '35%', + renderCell: (event: FailoverEvent) => event.id, + }, + { + name: 'Time', + id: 'time', + width: '15%', + renderCell: (event: FailoverEvent) => + createElement(FormattedDate, { + timestampMs: event.createdTime + ? 
parseGrpcTimestamp(event.createdTime) + : null, + }), + }, + { + name: 'Type', + id: 'type', + width: '10%', + renderCell: (event: FailoverEvent) => + FAILOVER_TYPE_LABEL_MAP[event.failoverType], + }, + { + name: 'Failover Information', + id: 'failoverInfo', + width: '40%', + renderCell: (event: FailoverEvent) => + createElement(DomainPageFailoverSingleCluster, { + fromCluster: event.clusterFailovers[0]?.fromCluster?.activeClusterName, + toCluster: event.clusterFailovers[0]?.toCluster?.activeClusterName, + }), + }, +] as const satisfies TableConfig; + +export default domainPageFailoversTableConfig; diff --git a/src/views/domain-page/config/domain-page-query-params.config.ts b/src/views/domain-page/config/domain-page-query-params.config.ts index 645fdb45f..48d551c51 100644 --- a/src/views/domain-page/config/domain-page-query-params.config.ts +++ b/src/views/domain-page/config/domain-page-query-params.config.ts @@ -41,6 +41,9 @@ const domainPageQueryParamsConfig: [ PageQueryParam<'sortColumnArchival', string>, PageQueryParam<'sortOrderArchival', SortOrder>, PageQueryParam<'queryArchival', string>, + // Failovers Tab query params + PageQueryParam<'clusterAttributeScope', string | undefined>, + PageQueryParam<'clusterAttributeValue', string | undefined>, ] = [ { key: 'inputType', @@ -163,6 +166,14 @@ const domainPageQueryParamsConfig: [ queryParamKey: 'aquery', defaultValue: '', }, + { + key: 'clusterAttributeScope', + queryParamKey: 'cs', + }, + { + key: 'clusterAttributeValue', + queryParamKey: 'cv', + }, ] as const; export default domainPageQueryParamsConfig; diff --git a/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx b/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx new file mode 100644 index 000000000..42691611a --- /dev/null +++ 
b/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx @@ -0,0 +1,311 @@ +import { render, screen } from '@/test-utils/rtl'; + +import * as usePageQueryParamsModule from '@/hooks/use-page-query-params/use-page-query-params'; +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; +import { mockDomainPageQueryParamsValues } from '@/views/domain-page/__fixtures__/domain-page-query-params'; +import { PRIMARY_CLUSTER_SCOPE } from '@/views/domain-page/domain-page-failovers/domain-page-failovers.constants'; + +import DomainPageFailoverActiveActive from '../domain-page-failover-active-active'; + +const mockSetQueryParams = jest.fn(); +jest.mock('@/hooks/use-page-query-params/use-page-query-params', () => + jest.fn(() => [mockDomainPageQueryParamsValues, mockSetQueryParams]) +); + +jest.mock( + '../../domain-page-failover-single-cluster/domain-page-failover-single-cluster', + () => + jest.fn((props: { fromCluster?: string; toCluster?: string }) => ( +
+ {`${props.fromCluster} -> ${props.toCluster}`} +
+ )) +); + +describe(DomainPageFailoverActiveActive.name, () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('renders cluster failover when matching primary cluster failover is found', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: null, + }, + ], + }; + + setup({ + failoverEvent, + clusterAttributeScope: PRIMARY_CLUSTER_SCOPE, + }); + + expect(screen.getByText('Primary:')).toBeInTheDocument(); + expect( + screen.getByTestId('mock-single-cluster-failover') + ).toBeInTheDocument(); + expect(screen.getByText('cluster-1 -> cluster-2')).toBeInTheDocument(); + expect(screen.getByText('See more')).toBeInTheDocument(); + }); + + it('renders cluster failover when matching non-primary cluster failover is found', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }, + ], + }; + + setup({ + failoverEvent, + clusterAttributeScope: 'city', + clusterAttributeValue: 'new_york', + }); + + expect(screen.getByText('city (new_york):')).toBeInTheDocument(); + expect( + screen.getByTestId('mock-single-cluster-failover') + ).toBeInTheDocument(); + expect(screen.getByText('cluster-1 -> cluster-2')).toBeInTheDocument(); + expect(screen.getByText('See more')).toBeInTheDocument(); + }); + + it('does not render cluster failover section when clusterAttributeScope is set but clusterAttributeValue 
is undefined for non-primary scope', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'region', + name: 'us-east', + }, + }, + ], + }; + + setup({ + failoverEvent, + clusterAttributeScope: 'region', + }); + + expect( + screen.queryByTestId('mock-single-cluster-failover') + ).not.toBeInTheDocument(); + expect(screen.getByText('See more')).toBeInTheDocument(); + }); + + it('does not render cluster failover section when no matching cluster failover is found', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }, + ], + }; + + setup({ + failoverEvent, + clusterAttributeScope: 'city', + clusterAttributeValue: 'los_angeles', + }); + + expect(screen.queryByText('city (los_angeles):')).not.toBeInTheDocument(); + expect( + screen.queryByTestId('mock-single-cluster-failover') + ).not.toBeInTheDocument(); + expect(screen.getByText('See more')).toBeInTheDocument(); + }); + + it('does not render cluster failover section when clusterAttributeScope is undefined', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + 
failoverVersion: '2', + }, + clusterAttribute: null, + }, + ], + }; + + setup({ + failoverEvent, + clusterAttributeScope: undefined, + }); + + expect( + screen.queryByTestId('mock-single-cluster-failover') + ).not.toBeInTheDocument(); + expect(screen.getByText('See more')).toBeInTheDocument(); + }); + + it('renders "See more" button even when no matching cluster failover is found', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ + failoverEvent, + clusterAttributeScope: PRIMARY_CLUSTER_SCOPE, + }); + + expect( + screen.queryByTestId('mock-single-cluster-failover') + ).not.toBeInTheDocument(); + expect(screen.getByText('See more')).toBeInTheDocument(); + }); + + it('selects the correct cluster failover when multiple cluster failovers exist', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }, + { + fromCluster: { + activeClusterName: 'cluster-3', + failoverVersion: '3', + }, + toCluster: { + activeClusterName: 'cluster-4', + failoverVersion: '4', + }, + clusterAttribute: { + scope: 'region', + name: 'us-east', + }, + }, + ], + }; + + setup({ + failoverEvent, + clusterAttributeScope: 'region', + clusterAttributeValue: 'us-east', + }); + + expect(screen.getByText('region (us-east):')).toBeInTheDocument(); + expect( + screen.getByTestId('mock-single-cluster-failover') + ).toBeInTheDocument(); + expect(screen.getByText('cluster-3 -> cluster-4')).toBeInTheDocument(); + }); +}); + +function setup({ + failoverEvent, + clusterAttributeScope, + 
clusterAttributeValue, +}: { + failoverEvent: FailoverEvent; + clusterAttributeScope?: string; + clusterAttributeValue?: string; +}) { + jest.spyOn(usePageQueryParamsModule, 'default').mockReturnValue([ + { + ...mockDomainPageQueryParamsValues, + clusterAttributeScope, + clusterAttributeValue, + } as typeof mockDomainPageQueryParamsValues, + mockSetQueryParams, + ]); + + render(); +} diff --git a/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.styles.ts b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.styles.ts new file mode 100644 index 000000000..37628c859 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.styles.ts @@ -0,0 +1,26 @@ +import { styled as createStyled, type Theme } from 'baseui'; + +export const styled = { + FailoverEventContainer: createStyled( + 'div', + ({ $theme }: { $theme: Theme }) => ({ + display: 'flex', + gap: $theme.sizing.scale600, + alignItems: 'baseline', + }) + ), + ClusterFailoverContainer: createStyled( + 'div', + ({ $theme }: { $theme: Theme }) => ({ + display: 'flex', + alignItems: 'baseline', + gap: $theme.sizing.scale300, + }) + ), + ClusterAttributeLabel: createStyled( + 'div', + ({ $theme }: { $theme: Theme }) => ({ + ...$theme.typography.LabelSmall, + }) + ), +}; diff --git a/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx new file mode 100644 index 000000000..c8d5ccc07 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx @@ -0,0 +1,76 @@ +import { useMemo } from 'react'; + +import { Button } from 'baseui/button'; +import { MdVisibility } from 'react-icons/md'; + +import usePageQueryParams from '@/hooks/use-page-query-params/use-page-query-params'; + +import 
domainPageQueryParamsConfig from '../config/domain-page-query-params.config'; +import DomainPageFailoverSingleCluster from '../domain-page-failover-single-cluster/domain-page-failover-single-cluster'; +import { PRIMARY_CLUSTER_SCOPE } from '../domain-page-failovers/domain-page-failovers.constants'; +import clusterFailoverMatchesAttribute from '../helpers/cluster-failover-matches-attribute'; + +import { styled } from './domain-page-failover-active-active.styles'; +import { type Props } from './domain-page-failover-active-active.types'; + +export default function DomainPageFailoverActiveActive({ + failoverEvent, +}: Props) { + const [{ clusterAttributeScope, clusterAttributeValue }] = usePageQueryParams( + domainPageQueryParamsConfig + ); + + const clusterFailoverForMaybeSelectedAttribute = useMemo(() => { + if ( + !clusterAttributeScope || + (clusterAttributeScope !== PRIMARY_CLUSTER_SCOPE && + !clusterAttributeValue) + ) + return undefined; + + return failoverEvent.clusterFailovers.find((clusterFailover) => + clusterFailoverMatchesAttribute( + clusterFailover, + clusterAttributeScope, + clusterAttributeValue + ) + ); + }, [ + clusterAttributeScope, + clusterAttributeValue, + failoverEvent.clusterFailovers, + ]); + + return ( + + {clusterFailoverForMaybeSelectedAttribute && ( + + + {clusterAttributeScope === PRIMARY_CLUSTER_SCOPE + ? 
'Primary:' + : `${clusterAttributeScope} (${clusterAttributeValue}):`} + + + + )} + + + ); +} diff --git a/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.types.ts b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.types.ts new file mode 100644 index 000000000..300e642c9 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.types.ts @@ -0,0 +1,5 @@ +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; + +export type Props = { + failoverEvent: FailoverEvent; +}; diff --git a/src/views/domain-page/domain-page-failover-single-cluster/__tests__/domain-page-failover-single-cluster.test.tsx b/src/views/domain-page/domain-page-failover-single-cluster/__tests__/domain-page-failover-single-cluster.test.tsx new file mode 100644 index 000000000..849cb6b0f --- /dev/null +++ b/src/views/domain-page/domain-page-failover-single-cluster/__tests__/domain-page-failover-single-cluster.test.tsx @@ -0,0 +1,28 @@ +import { render, screen } from '@/test-utils/rtl'; + +import DomainPageFailoverSingleCluster from '../domain-page-failover-single-cluster'; + +describe(DomainPageFailoverSingleCluster.name, () => { + it('renders fromCluster and toCluster with arrow', () => { + setup({ fromCluster: 'cluster-1', toCluster: 'cluster-2' }); + + expect(screen.getByText(/cluster-1/)).toBeInTheDocument(); + expect(screen.getByText(/cluster-2/)).toBeInTheDocument(); + }); + + it('returns null when fromCluster is missing', () => { + setup({ toCluster: 'cluster-2' }); + + expect(screen.queryByText(/cluster-2/)).not.toBeInTheDocument(); + }); + + it('returns null when toCluster is missing', () => { + setup({ fromCluster: 'cluster-1' }); + + expect(screen.queryByText(/cluster-1/)).not.toBeInTheDocument(); + }); +}); + +function setup(props: { fromCluster?: string; toCluster?: string }) { + render(); +} diff 
--git a/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.styles.ts b/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.styles.ts new file mode 100644 index 000000000..b6ba98489 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.styles.ts @@ -0,0 +1,17 @@ +import { type Theme } from 'baseui'; + +import type { + StyletronCSSObject, + StyletronCSSObjectOf, +} from '@/hooks/use-styletron-classes'; + +const cssStylesObj = { + failoverContainer: (theme: Theme) => ({ + display: 'flex', + gap: theme.sizing.scale400, + alignItems: 'center', + }), +} satisfies StyletronCSSObject; + +export const cssStyles: StyletronCSSObjectOf = + cssStylesObj; diff --git a/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.tsx b/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.tsx new file mode 100644 index 000000000..3466dc29e --- /dev/null +++ b/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.tsx @@ -0,0 +1,23 @@ +import { MdArrowForward } from 'react-icons/md'; + +import useStyletronClasses from '@/hooks/use-styletron-classes'; + +import { cssStyles } from './domain-page-failover-single-cluster.styles'; +import { type Props } from './domain-page-failover-single-cluster.types'; + +export default function DomainPageFailoverSingleCluster({ + fromCluster, + toCluster, +}: Props) { + const { cls, theme } = useStyletronClasses(cssStyles); + + if (!fromCluster || !toCluster) return null; + + return ( +
+ {fromCluster} + + {toCluster} +
+ ); +} diff --git a/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.types.ts b/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.types.ts new file mode 100644 index 000000000..c5714aeae --- /dev/null +++ b/src/views/domain-page/domain-page-failover-single-cluster/domain-page-failover-single-cluster.types.ts @@ -0,0 +1,4 @@ +export type Props = { + fromCluster?: string; + toCluster?: string; +}; diff --git a/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx b/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx new file mode 100644 index 000000000..5868c44e2 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx @@ -0,0 +1,227 @@ +import React, { Suspense } from 'react'; + +import { HttpResponse } from 'msw'; + +import { render, screen } from '@/test-utils/rtl'; + +import { type Props as LoaderProps } from '@/components/table/table-infinite-scroll-loader/table-infinite-scroll-loader.types'; +import { type DescribeDomainResponse } from '@/route-handlers/describe-domain/describe-domain.types'; +import { + type FailoverEvent, + type ListFailoverHistoryResponse, +} from '@/route-handlers/list-failover-history/list-failover-history.types'; +import { mockDomainDescription } from '@/views/domain-page/__fixtures__/domain-description'; +import { mockDomainPageQueryParamsValues } from '@/views/domain-page/__fixtures__/domain-page-query-params'; +import { mockActiveActiveDomain } from '@/views/shared/active-active/__fixtures__/active-active-domain'; + +import DomainPageFailovers from '../domain-page-failovers'; + +const mockSetQueryParams = jest.fn(); +jest.mock('@/hooks/use-page-query-params/use-page-query-params', () => + jest.fn(() => [mockDomainPageQueryParamsValues, mockSetQueryParams]) +); + +jest.mock( + 
'@/components/table/table-infinite-scroll-loader/table-infinite-scroll-loader', + () => + jest.fn((props: LoaderProps) => ( + + )) +); + +jest.mock('../../config/domain-page-failovers-table.config', () => [ + { + name: 'Failover ID', + id: 'failoverId', + width: '35%', + renderCell: (event: FailoverEvent) =>
{event.id}
, + }, + { + name: 'Time', + id: 'time', + width: '15%', + renderCell: (event: FailoverEvent) => ( +
{event.createdTime?.seconds || 'No date'}
+ ), + }, + { + name: 'Type', + id: 'type', + width: '10%', + renderCell: (event: FailoverEvent) =>
{event.failoverType}
, + }, + { + name: 'Failover Information', + id: 'failoverInfo', + width: '40%', + renderCell: (event: FailoverEvent) => ( +
+ {event.clusterFailovers[0]?.fromCluster?.activeClusterName} + {` -> `} + {event.clusterFailovers[0]?.toCluster?.activeClusterName} +
+ ), + }, +]); + +jest.mock( + '../../config/domain-page-failovers-table-active-active.config', + () => [ + { + name: 'Failover ID', + id: 'failoverId', + width: '35%', + renderCell: (event: FailoverEvent) =>
{event.id}
, + }, + { + name: 'Time', + id: 'time', + width: '15%', + renderCell: (event: FailoverEvent) => ( +
{event.createdTime?.seconds || 'No date'}
+ ), + }, + { + name: 'Type', + id: 'type', + width: '10%', + renderCell: (event: FailoverEvent) =>
{event.failoverType}
, + }, + { + name: 'Failover Information', + id: 'failoverInfo', + width: '40%', + renderCell: (event: FailoverEvent) => ( +
Active Active: {event.id}
+ ), + }, + ] +); + +const mockFailoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: null, + }, + ], +}; + +describe(DomainPageFailovers.name, () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('renders table with column headers', async () => { + await setup({}); + + expect(await screen.findByText('Failover ID')).toBeInTheDocument(); + expect(screen.getByText('Time')).toBeInTheDocument(); + expect(screen.getByText('Type')).toBeInTheDocument(); + expect(screen.getByText('Failover Information')).toBeInTheDocument(); + }); + + it('renders failover events in table', async () => { + await setup({ + failoverResponse: { + failoverEvents: [mockFailoverEvent], + nextPageToken: '', + }, + }); + + expect(await screen.findByText('failover-1')).toBeInTheDocument(); + expect(screen.getByText('FAILOVER_TYPE_GRACEFUL')).toBeInTheDocument(); + }); + + it('does not render data rows when no failover events', async () => { + await setup({ + failoverResponse: { + failoverEvents: [], + nextPageToken: '', + }, + }); + + await screen.findByText('Failover ID'); + expect(screen.queryByText('failover-1')).not.toBeInTheDocument(); + }); + + it('renders table with active-active config when domain is active-active', async () => { + await setup({ + domainDescription: mockActiveActiveDomain, + failoverResponse: { + failoverEvents: [mockFailoverEvent], + nextPageToken: '', + }, + }); + + expect(await screen.findByText('failover-1')).toBeInTheDocument(); + }); +}); + +async function setup({ + domain = 'mock-domain', + cluster = 'mock-cluster', + domainDescription = mockDomainDescription, + failoverResponse = { + failoverEvents: [], + nextPageToken: '', + }, + failoverError = false, +}: 
{ + domain?: string; + cluster?: string; + domainDescription?: typeof mockDomainDescription; + failoverResponse?: ListFailoverHistoryResponse; + failoverError?: boolean; +}) { + render( + Loading...
}> + + , + { + endpointsMocks: [ + { + path: '/api/domains/:domain/:cluster', + httpMethod: 'GET', + mockOnce: false, + jsonResponse: domainDescription satisfies DescribeDomainResponse, + }, + { + path: '/api/domains/:domain/:cluster/failovers', + httpMethod: 'GET', + mockOnce: false, + ...(failoverError + ? { + httpResolver: () => { + return HttpResponse.json( + { message: 'Failed to fetch failover history' }, + { status: 500 } + ); + }, + } + : { + jsonResponse: + failoverResponse satisfies ListFailoverHistoryResponse, + }), + }, + ], + } + ); + + await screen.findByText('Failover ID'); +} diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts b/src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts new file mode 100644 index 000000000..54216b184 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts @@ -0,0 +1,10 @@ +import { styled as createStyled, type Theme } from 'baseui'; + +export const styled = { + FailoversTableContainer: createStyled( + 'div', + ({ $theme }: { $theme: Theme }) => ({ + paddingTop: $theme.sizing.scale950, + }) + ), +}; diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx b/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx index 019602f20..868c38ac3 100644 --- a/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx +++ b/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx @@ -1,8 +1,73 @@ 'use client'; import React from 'react'; +import Table from '@/components/table/table'; +import usePageQueryParams from '@/hooks/use-page-query-params/use-page-query-params'; +import isActiveActiveDomain from '@/views/shared/active-active/helpers/is-active-active-domain'; +import useSuspenseDomainDescription from '@/views/shared/hooks/use-domain-description/use-suspense-domain-description'; + +import domainPageFailoversTableActiveActiveConfig from 
'../config/domain-page-failovers-table-active-active.config'; +import domainPageFailoversTableConfig from '../config/domain-page-failovers-table.config'; +import domainPageQueryParamsConfig from '../config/domain-page-query-params.config'; import { type DomainPageTabContentProps } from '../domain-page-content/domain-page-content.types'; +import useDomainFailoverHistory from '../hooks/use-domain-failover-history/use-domain-failover-history'; + +import { styled } from './domain-page-failovers.styles'; + +export default function DomainPageFailovers({ + domain, + cluster, +}: DomainPageTabContentProps) { + const { data: domainDescription } = useSuspenseDomainDescription({ + domain, + cluster, + }); + + const isActiveActive = isActiveActiveDomain(domainDescription); + + const [{ clusterAttributeScope, clusterAttributeValue }] = usePageQueryParams( + domainPageQueryParamsConfig + ); + + const { + filteredFailoverEvents, + allFailoverEvents, + isLoading, + error, + fetchNextPage, + hasNextPage, + isFetchingNextPage, + } = useDomainFailoverHistory({ + domainName: domain, + domainId: domainDescription.id, + cluster, + ...(isActiveActive + ? { + clusterAttributeScope, + clusterAttributeValue, + } + : {}), + }); -export default function DomainPageFailovers(_: DomainPageTabContentProps) { - return
WIP: Domain Page Failovers Tab
; + return ( + + 0} + endMessageProps={{ + kind: 'infinite-scroll', + hasData: allFailoverEvents.length > 0, + error, + fetchNextPage, + hasNextPage, + isFetchingNextPage, + }} + columns={ + isActiveActive + ? domainPageFailoversTableActiveActiveConfig + : domainPageFailoversTableConfig + } + /> + + ); } From ca0466c81e6fcc92d57f621c61e7fdc6b973167a Mon Sep 17 00:00:00 2001 From: Tim Chan Date: Fri, 14 Nov 2025 10:05:03 -0800 Subject: [PATCH 38/58] chore: Fix console.error pollution (#1077) * chore: Fix console.error pollution Signed-off-by: Tim Chan * Relocated to src/test-utils. Signed-off-by: Tim Chan --------- Signed-off-by: Tim Chan --- src/test-utils/mock-console-error.ts | 35 +++++++++++++++++++ .../__tests__/domain-page-tabs.test.tsx | 23 +++++++++--- 2 files changed, 54 insertions(+), 4 deletions(-) create mode 100644 src/test-utils/mock-console-error.ts diff --git a/src/test-utils/mock-console-error.ts b/src/test-utils/mock-console-error.ts new file mode 100644 index 000000000..545b6407b --- /dev/null +++ b/src/test-utils/mock-console-error.ts @@ -0,0 +1,35 @@ +/* + * Mocks console.error and silences known errors. + * + * Apply this utility to mute known errors and avoid polluting the test output. 
+ */ +export const mockConsoleError = ({ + silencedErrorRegexes, +}: { + silencedErrorRegexes: RegExp[]; +}) => { + // eslint-disable-next-line no-console + const consoleError = console.error; + + const consoleErrorImpl = (...data: any[]) => { + const dataString = data.toString(); + let shouldIgnore = false; + + for (const regex of silencedErrorRegexes) { + if (regex.test(dataString)) { + shouldIgnore = true; + break; + } + } + + if (!shouldIgnore) { + consoleError(...data); + } + }; + + const spy = jest.spyOn(console, 'error').mockImplementation(consoleErrorImpl); + + return { + restore: () => spy.mockRestore(), + }; +}; diff --git a/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx b/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx index 247f98d0e..5b8f961c4 100644 --- a/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx +++ b/src/views/domain-page/domain-page-tabs/__tests__/domain-page-tabs.test.tsx @@ -6,6 +6,7 @@ import { render, screen, userEvent } from '@/test-utils/rtl'; import ErrorBoundary from '@/components/error-boundary/error-boundary'; import { type GetConfigResponse } from '@/route-handlers/get-config/get-config.types'; +import { mockConsoleError } from '@/test-utils/mock-console-error'; import DomainPageTabs from '../domain-page-tabs'; @@ -141,11 +142,25 @@ describe(DomainPageTabs.name, () => { }); it('handles errors gracefully', async () => { - await setup({ error: true }); + // Mute console.error to avoid polluting the test output. 
+ const silencedErrorRegexes = [ + /RequestError: Failed to fetch config/, + /The above error occurred in the component/, + ]; + const { restore: restoreConsoleError } = mockConsoleError({ + silencedErrorRegexes, + }); + + try { + await setup({ error: true }); - expect( - await screen.findByText('Error: Failed to fetch config') - ).toBeInTheDocument(); + expect( + await screen.findByText('Error: Failed to fetch config') + ).toBeInTheDocument(); + } finally { + // Be sure to restore the console.error. + restoreConsoleError(); + } }); it('renders the help button as endEnhancer', async () => { From d4773f5982bf22437d95d0be5273cd5139635b4a Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Mon, 17 Nov 2025 14:43:55 +0100 Subject: [PATCH 39/58] Add flag for History Page v2 (#1081) Signed-off-by: Adhitya Mamallan --- src/config/dynamic/dynamic.config.ts | 12 ++++++++++++ .../dynamic/resolvers/history-page-v2-enabled.ts | 11 +++++++++++ .../dynamic/resolvers/schemas/resolver-schemas.ts | 4 ++++ .../config/__fixtures__/resolved-config-values.ts | 1 + 4 files changed, 28 insertions(+) create mode 100644 src/config/dynamic/resolvers/history-page-v2-enabled.ts diff --git a/src/config/dynamic/dynamic.config.ts b/src/config/dynamic/dynamic.config.ts index 843939728..9c308be70 100644 --- a/src/config/dynamic/dynamic.config.ts +++ b/src/config/dynamic/dynamic.config.ts @@ -15,6 +15,7 @@ import cronListEnabled from './resolvers/cron-list-enabled'; import extendedDomainInfoEnabled from './resolvers/extended-domain-info-enabled'; import { type ExtendedDomainInfoEnabledConfig } from './resolvers/extended-domain-info-enabled.types'; import failoverHistoryEnabled from './resolvers/failover-history-enabled'; +import historyPageV2Enabled from './resolvers/history-page-v2-enabled'; import workflowActionsEnabled from './resolvers/workflow-actions-enabled'; import { type WorkflowActionsEnabledResolverParams, @@ -72,6 +73,12 @@ const dynamicConfigs: { 'request', true >; + 
HISTORY_PAGE_V2_ENABLED: ConfigAsyncResolverDefinition< + undefined, + boolean, + 'request', + true + >; } = { CADENCE_WEB_PORT: { env: 'CADENCE_WEB_PORT', @@ -121,6 +128,11 @@ const dynamicConfigs: { evaluateOn: 'request', isPublic: true, }, + HISTORY_PAGE_V2_ENABLED: { + resolver: historyPageV2Enabled, + evaluateOn: 'request', + isPublic: true, + }, } as const; export default dynamicConfigs; diff --git a/src/config/dynamic/resolvers/history-page-v2-enabled.ts b/src/config/dynamic/resolvers/history-page-v2-enabled.ts new file mode 100644 index 000000000..174e28c07 --- /dev/null +++ b/src/config/dynamic/resolvers/history-page-v2-enabled.ts @@ -0,0 +1,11 @@ +/** + * WIP: Returns whether the new Workflow History (V2) page is enabled + * + * To enable the new Workflow History (V2) page, set the CADENCE_HISTORY_PAGE_V2_ENABLED env variable to true. + * For further customization, override the implementation of this resolver. + * + * @returns {Promise} Whether Workflow History (V2) page is enabled. 
+ */ +export default async function historyPageV2Enabled(): Promise { + return process.env.CADENCE_HISTORY_PAGE_V2_ENABLED === 'true'; +} diff --git a/src/config/dynamic/resolvers/schemas/resolver-schemas.ts b/src/config/dynamic/resolvers/schemas/resolver-schemas.ts index 7f36b9b98..6eff979d1 100644 --- a/src/config/dynamic/resolvers/schemas/resolver-schemas.ts +++ b/src/config/dynamic/resolvers/schemas/resolver-schemas.ts @@ -67,6 +67,10 @@ const resolverSchemas: ResolverSchemas = { args: z.undefined(), returnType: z.boolean(), }, + HISTORY_PAGE_V2_ENABLED: { + args: z.undefined(), + returnType: z.boolean(), + }, }; export default resolverSchemas; diff --git a/src/utils/config/__fixtures__/resolved-config-values.ts b/src/utils/config/__fixtures__/resolved-config-values.ts index 6fd54f813..f002ad4b0 100644 --- a/src/utils/config/__fixtures__/resolved-config-values.ts +++ b/src/utils/config/__fixtures__/resolved-config-values.ts @@ -43,5 +43,6 @@ const mockResolvedConfigValues: LoadedConfigResolvedValues = { WORKFLOW_DIAGNOSTICS_ENABLED: false, ARCHIVAL_DEFAULT_SEARCH_ENABLED: false, FAILOVER_HISTORY_ENABLED: false, + HISTORY_PAGE_V2_ENABLED: false, }; export default mockResolvedConfigValues; From 6ef301216eb753c666d8f1324fae07c4b70461d7 Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Mon, 17 Nov 2025 14:52:24 +0100 Subject: [PATCH 40/58] feat: Filters for Failover History Table (#1079) Add filters for Failover History table, but only for active-active domains Create filter for Cluster Attribute Scope, which depends on domain description to suggest possible scopes Create filter for Cluster Attribute Value, which depends on selected scope to suggest possible values/names Remove (now) unused styles file from DomainPageFailovers Signed-off-by: Adhitya Mamallan --- .../domain-page-failovers-filters.test.tsx | 226 ++++++++++++++++++ .../domain-page-failovers-filters.styles.ts | 56 +++++ .../domain-page-failovers-filters.tsx | 136 +++++++++++ 
.../__tests__/domain-page-failovers.test.tsx | 19 ++ .../domain-page-failovers.styles.ts | 10 - .../domain-page-failovers.tsx | 18 +- 6 files changed, 450 insertions(+), 15 deletions(-) create mode 100644 src/views/domain-page/domain-page-failovers-filters/__tests__/domain-page-failovers-filters.test.tsx create mode 100644 src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.styles.ts create mode 100644 src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.tsx delete mode 100644 src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts diff --git a/src/views/domain-page/domain-page-failovers-filters/__tests__/domain-page-failovers-filters.test.tsx b/src/views/domain-page/domain-page-failovers-filters/__tests__/domain-page-failovers-filters.test.tsx new file mode 100644 index 000000000..911fb5192 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers-filters/__tests__/domain-page-failovers-filters.test.tsx @@ -0,0 +1,226 @@ +import { render, screen, userEvent } from '@/test-utils/rtl'; + +import { type PageQueryParamValues } from '@/hooks/use-page-query-params/use-page-query-params.types'; +import { mockDomainPageQueryParamsValues } from '@/views/domain-page/__fixtures__/domain-page-query-params'; +import type domainPageQueryParamsConfig from '@/views/domain-page/config/domain-page-query-params.config'; +import { mockActiveActiveDomain } from '@/views/shared/active-active/__fixtures__/active-active-domain'; +import { type ActiveActiveDomain } from '@/views/shared/active-active/active-active.types'; + +import { PRIMARY_CLUSTER_SCOPE } from '../../domain-page-failovers/domain-page-failovers.constants'; +import DomainPageFailoversFilters from '../domain-page-failovers-filters'; + +describe(DomainPageFailoversFilters.name, () => { + it('renders both filter comboboxes and reset button', () => { + setup({}); + + expect(screen.getByText('Cluster Attribute Scope')).toBeInTheDocument(); + 
expect(screen.getByText('Cluster Attribute Value')).toBeInTheDocument(); + expect(screen.getByText('Reset filters')).toBeInTheDocument(); + }); + + it('displays cluster attribute scope options including primary and domain scopes', async () => { + const { user } = setup({}); + + const scopeCombobox = screen.getByPlaceholderText( + 'Scope of cluster attribute' + ); + await user.click(scopeCombobox); + + expect(screen.getByText(PRIMARY_CLUSTER_SCOPE)).toBeInTheDocument(); + expect(screen.getByText('region')).toBeInTheDocument(); + }); + + it('disables cluster attribute value combobox when scope is primary', () => { + setup({ + queryParamsOverrides: { + clusterAttributeScope: PRIMARY_CLUSTER_SCOPE, + }, + }); + + expect( + screen.getByPlaceholderText('Value/name of cluster attribute') + ).toBeDisabled(); + }); + + it('enables cluster attribute value combobox when scope is not primary', () => { + setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + }, + }); + + expect( + screen.getByPlaceholderText('Value/name of cluster attribute') + ).not.toBeDisabled(); + }); + + it('displays cluster attribute values for selected scope', async () => { + const { user } = setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + }, + }); + + const valueCombobox = screen.getByPlaceholderText( + 'Value/name of cluster attribute' + ); + await user.click(valueCombobox); + + expect(screen.getByText('region0')).toBeInTheDocument(); + expect(screen.getByText('region1')).toBeInTheDocument(); + }); + + it('calls setQueryParams with new scope and resets value when scope changes', async () => { + const { user, mockSetQueryParams } = setup({}); + + const scopeCombobox = screen.getByPlaceholderText( + 'Scope of cluster attribute' + ); + await user.click(scopeCombobox); + await user.click(screen.getByText('region')); + + expect(mockSetQueryParams).toHaveBeenCalledWith({ + clusterAttributeScope: 'region', + clusterAttributeValue: undefined, + }); + }); + + it('calls 
setQueryParams with new value when cluster attribute value changes', async () => { + const { user, mockSetQueryParams } = setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + }, + }); + + const valueCombobox = screen.getByPlaceholderText( + 'Value/name of cluster attribute' + ); + await user.click(valueCombobox); + await user.click(screen.getByText('region0')); + + expect(mockSetQueryParams).toHaveBeenCalledWith({ + clusterAttributeValue: 'region0', + }); + }); + + it('calls setQueryParams with undefined when clearing scope', async () => { + const { user, mockSetQueryParams } = setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + clusterAttributeValue: 'region0', + }, + }); + + const scopeCombobox = screen.getByPlaceholderText( + 'Scope of cluster attribute' + ); + await user.click(scopeCombobox); + + // Find and click the clear button (BaseUI Combobox clearable) + const clearButtons = screen.getAllByLabelText('Clear value'); + await user.click(clearButtons[0]); + + expect(mockSetQueryParams).toHaveBeenCalledWith({ + clusterAttributeScope: undefined, + clusterAttributeValue: undefined, + }); + }); + + it('calls setQueryParams with undefined when clearing value', async () => { + const { user, mockSetQueryParams } = setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + clusterAttributeValue: 'region0', + }, + }); + + const valueCombobox = screen.getByPlaceholderText( + 'Value/name of cluster attribute' + ); + await user.click(valueCombobox); + + // Find and click the clear button + const clearButtons = screen.getAllByLabelText('Clear value'); + await user.click(clearButtons[1]); + + expect(mockSetQueryParams).toHaveBeenCalledWith({ + clusterAttributeValue: undefined, + }); + }); + + it('resets both filters when reset filters button is clicked', async () => { + const { user, mockSetQueryParams } = setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + clusterAttributeValue: 'region0', + }, + }); + + 
const resetButton = screen.getByText('Reset filters'); + await user.click(resetButton); + + expect(mockSetQueryParams).toHaveBeenCalledWith({ + clusterAttributeScope: undefined, + clusterAttributeValue: undefined, + }); + }); + + it('displays current scope value in combobox', () => { + setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + }, + }); + + expect( + screen.getByPlaceholderText('Scope of cluster attribute') + ).toHaveValue('region'); + }); + + it('displays current value in cluster attribute value combobox', () => { + setup({ + queryParamsOverrides: { + clusterAttributeScope: 'region', + clusterAttributeValue: 'region0', + }, + }); + + expect( + screen.getByPlaceholderText('Value/name of cluster attribute') + ).toHaveValue('region0'); + }); +}); + +function setup({ + queryParamsOverrides, + domainDescriptionOverrides, +}: { + queryParamsOverrides?: Partial< + PageQueryParamValues + >; + domainDescriptionOverrides?: Partial; +} = {}) { + const mockSetQueryParams = jest.fn(); + + const domainDescription = { + ...mockActiveActiveDomain, + ...domainDescriptionOverrides, + }; + + const queryParams = { + ...mockDomainPageQueryParamsValues, + ...queryParamsOverrides, + }; + + render( + + ); + + const user = userEvent.setup(); + + return { mockSetQueryParams, user }; +} diff --git a/src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.styles.ts b/src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.styles.ts new file mode 100644 index 000000000..c1e832ef7 --- /dev/null +++ b/src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.styles.ts @@ -0,0 +1,56 @@ +import { type Theme } from 'baseui'; +import { styled as createStyled } from 'baseui'; +import { type ButtonOverrides } from 'baseui/button'; +import type { FormControlOverrides } from 'baseui/form-control/types'; +import { type StyleObject } from 'styletron-react'; + +export const styled = { + 
FiltersContainer: createStyled('div', ({ $theme }) => ({ + display: 'flex', + flexDirection: 'column', + flexWrap: 'wrap', + gap: $theme.sizing.scale500, + [$theme.mediaQuery.medium]: { + flexDirection: 'row', + alignItems: 'flex-end', + }, + marginTop: $theme.sizing.scale950, + marginBottom: $theme.sizing.scale900, + })), + FilterContainer: createStyled('div', ({ $theme }) => ({ + flexGrow: 1, + flexShrink: 1, + flexBasis: '0', + [$theme.mediaQuery.medium]: { + alignSelf: 'flex-start', + }, + })), +}; + +export const overrides = { + comboboxFormControl: { + Label: { + style: ({ $theme }: { $theme: Theme }): StyleObject => ({ + ...$theme.typography.LabelXSmall, + }), + }, + ControlContainer: { + style: (): StyleObject => ({ + margin: '0px', + }), + }, + } satisfies FormControlOverrides, + clearFiltersButton: { + Root: { + style: ({ $theme }: { $theme: Theme }): StyleObject => ({ + whiteSpace: 'nowrap', + flexGrow: 2, + height: $theme.sizing.scale950, + [$theme.mediaQuery.medium]: { + flexGrow: 0, + alignSelf: 'flex-end', + }, + }), + }, + } satisfies ButtonOverrides, +}; diff --git a/src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.tsx b/src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.tsx new file mode 100644 index 000000000..c834ad95a --- /dev/null +++ b/src/views/domain-page/domain-page-failovers-filters/domain-page-failovers-filters.tsx @@ -0,0 +1,136 @@ +import { useMemo } from 'react'; + +import { Button } from 'baseui/button'; +import { Combobox } from 'baseui/combobox'; +import { FormControl } from 'baseui/form-control'; +import { Delete } from 'baseui/icon'; + +import { + type PageQueryParamSetter, + type PageQueryParamValues, +} from '@/hooks/use-page-query-params/use-page-query-params.types'; +import { type ActiveActiveDomain } from '@/views/shared/active-active/active-active.types'; + +import type domainPageQueryParamsConfig from '../config/domain-page-query-params.config'; +import { 
PRIMARY_CLUSTER_SCOPE } from '../domain-page-failovers/domain-page-failovers.constants'; + +import { styled, overrides } from './domain-page-failovers-filters.styles'; + +export default function DomainPageFailoversFilters({ + domainDescription, + queryParams, + setQueryParams, +}: { + domainDescription: ActiveActiveDomain; + queryParams: PageQueryParamValues; + setQueryParams: PageQueryParamSetter; +}) { + const { clusterAttributeScope, clusterAttributeValue } = queryParams; + + const clusterAttributeScopes = useMemo( + () => [ + PRIMARY_CLUSTER_SCOPE, + ...Object.keys( + domainDescription.activeClusters.activeClustersByClusterAttribute + ), + ], + [domainDescription.activeClusters.activeClustersByClusterAttribute] + ); + + const clusterAttributeValuesForScope = useMemo(() => { + if ( + !clusterAttributeScope || + clusterAttributeScope === PRIMARY_CLUSTER_SCOPE + ) + return []; + + const activeClustersForScope = + domainDescription.activeClusters.activeClustersByClusterAttribute[ + clusterAttributeScope + ]; + + if (!activeClustersForScope) return []; + + return Object.keys(activeClustersForScope.clusterAttributes); + }, [ + clusterAttributeScope, + domainDescription.activeClusters.activeClustersByClusterAttribute, + ]); + + return ( + + + + + setQueryParams({ + clusterAttributeScope: nextValue === '' ? undefined : nextValue, + clusterAttributeValue: undefined, + }) + } + options={clusterAttributeScopes.map((scope) => ({ + id: scope, + }))} + mapOptionToString={(option) => option.id} + /> + + + + + + setQueryParams({ + clusterAttributeValue: nextValue === '' ? 
undefined : nextValue, + }) + } + options={clusterAttributeValuesForScope.map((scope) => ({ + id: scope, + }))} + mapOptionToString={(option) => option.id} + /> + + + + + ); +} diff --git a/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx b/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx index 5868c44e2..ff0aeae07 100644 --- a/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx +++ b/src/views/domain-page/domain-page-failovers/__tests__/domain-page-failovers.test.tsx @@ -100,6 +100,11 @@ jest.mock( ] ); +jest.mock( + '../../domain-page-failovers-filters/domain-page-failovers-filters', + () => jest.fn(() =>
) +); + const mockFailoverEvent: FailoverEvent = { id: 'failover-1', createdTime: { @@ -171,6 +176,20 @@ describe(DomainPageFailovers.name, () => { expect(await screen.findByText('failover-1')).toBeInTheDocument(); }); + + it('renders filters when domain is active-active', async () => { + await setup({ + domainDescription: mockActiveActiveDomain, + failoverResponse: { + failoverEvents: [], + nextPageToken: '', + }, + }); + + expect( + await screen.findByTestId('domain-page-failovers-filters') + ).toBeInTheDocument(); + }); }); async function setup({ diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts b/src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts deleted file mode 100644 index 54216b184..000000000 --- a/src/views/domain-page/domain-page-failovers/domain-page-failovers.styles.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { styled as createStyled, type Theme } from 'baseui'; - -export const styled = { - FailoversTableContainer: createStyled( - 'div', - ({ $theme }: { $theme: Theme }) => ({ - paddingTop: $theme.sizing.scale950, - }) - ), -}; diff --git a/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx b/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx index 868c38ac3..bdb5fa8b0 100644 --- a/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx +++ b/src/views/domain-page/domain-page-failovers/domain-page-failovers.tsx @@ -10,10 +10,9 @@ import domainPageFailoversTableActiveActiveConfig from '../config/domain-page-fa import domainPageFailoversTableConfig from '../config/domain-page-failovers-table.config'; import domainPageQueryParamsConfig from '../config/domain-page-query-params.config'; import { type DomainPageTabContentProps } from '../domain-page-content/domain-page-content.types'; +import DomainPageFailoversFilters from '../domain-page-failovers-filters/domain-page-failovers-filters'; import useDomainFailoverHistory from 
'../hooks/use-domain-failover-history/use-domain-failover-history'; -import { styled } from './domain-page-failovers.styles'; - export default function DomainPageFailovers({ domain, cluster, @@ -25,10 +24,12 @@ export default function DomainPageFailovers({ const isActiveActive = isActiveActiveDomain(domainDescription); - const [{ clusterAttributeScope, clusterAttributeValue }] = usePageQueryParams( + const [queryParams, setQueryParams] = usePageQueryParams( domainPageQueryParamsConfig ); + const { clusterAttributeScope, clusterAttributeValue } = queryParams; + const { filteredFailoverEvents, allFailoverEvents, @@ -50,7 +51,14 @@ export default function DomainPageFailovers({ }); return ( - +
+ {isActiveActive && ( + + )}
0} @@ -68,6 +76,6 @@ export default function DomainPageFailovers({ : domainPageFailoversTableConfig } /> - + ); } From d7ad6ae9fa092fb08781e8a129bbb68ff636335a Mon Sep 17 00:00:00 2001 From: Assem Hafez <137278762+Assem-Uber@users.noreply.github.com> Date: Mon, 17 Nov 2025 16:01:00 +0100 Subject: [PATCH 41/58] feat: Create a grouping utility that only parses new events (#1065) * grouping utility Signed-off-by: Assem Hafez * update test cases Signed-off-by: Assem Hafez * change the api of onChange and add destroy method Signed-off-by: Assem Hafez * replace getGroups with getState Signed-off-by: Assem Hafez * call onchange after updating pending events Signed-off-by: Assem Hafez * use sync processBatch for first page Signed-off-by: Assem Hafez * Update src/views/workflow-history/helpers/workflow-history-grouper.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Apply PR comments * Update src/views/workflow-history/helpers/workflow-history-grouper.types.ts Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Signed-off-by: Assem Hafez Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../helpers/__tests__/workflow-history-grouper.test.tsx | 7 ------- .../workflow-history/helpers/workflow-history-grouper.ts | 7 ++++--- .../helpers/workflow-history-grouper.types.ts | 5 ----- 3 files changed, 4 insertions(+), 15 deletions(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx index 31851b176..4529096d0 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-grouper.test.tsx @@ -16,8 +16,6 @@ import type { Props, } from '../workflow-history-grouper.types'; -// Commonly used mocks - // Track all setups for cleanup const allCleanups: Array<() => void> = []; @@ -633,11 +631,6 @@ 
describe(WorkflowHistoryGrouper.name, () => { // First batch is processed immediately, so processedEventsCount should be at least 1 expect(state.processedEventsCount).toBeGreaterThan(0); - // If there are remaining events, status could be 'processing' - if (state.remainingEventsCount > 0) { - expect(state.status).toBe('processing'); - } - // Wait for processing to complete await waitForProcessing(); diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.ts b/src/views/workflow-history/helpers/workflow-history-grouper.ts index 927994041..e0917a0d8 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.ts @@ -93,7 +93,7 @@ export default class WorkflowHistoryGrouper { * Updates pending events (activities and decisions). * This should be called separately from updateEvents. */ - public async updatePendingEvents(params: ProcessEventsParams) { + public updatePendingEvents(params: ProcessEventsParams) { // Update pending events (add new ones, remove stale ones) const currentPendingActivities = this.currentPendingActivities; @@ -184,10 +184,10 @@ export default class WorkflowHistoryGrouper { /** * Schedules the next batch using the best available API. - * Uses Scheduler API if available, otherwise falls back to Promise microtask. + * Uses Scheduler API if available, otherwise falls back to setTimeout. 
*/ private scheduleNextBatch() { - // if first batch process immediately, this helps avoiding UI delays + // If first batch, process immediately; this helps avoid UI delays if (this.lastProcessedEventIndex === -1) { this.processBatch(); } else if ( @@ -200,6 +200,7 @@ export default class WorkflowHistoryGrouper { .postTask(() => this.processBatch(), { priority: 'background' }) .catch(() => { // Fallback to setTimeout if postTask fails + // setTimeout adds the processBatch to Macro Task Queue (lowest priority queue) to allow current microtasks (UI updates) to complete first setTimeout(() => this.processBatch(), 0); }); } else { diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts index 45c4e44b9..782d5b473 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts @@ -11,11 +11,6 @@ export type ProcessEventsParams = { pendingStartDecision: PendingDecisionTaskStartEvent | null; }; -export type ProcessEventsResult = { - groups: HistoryEventsGroups; - lastProcessedEventIndex: number; -}; - /** * Processing status for incremental grouping operations. 
*/ From b301d4a99152886c532bf05de1c9e1884e7bc486 Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Tue, 18 Nov 2025 10:27:33 +0100 Subject: [PATCH 42/58] feat: Add modal to view full failover event (#1080) * Add modal for viewing an individual failover event, which is shown when "See more" in the table is clicked Signed-off-by: Adhitya Mamallan --- ...omain-page-failover-active-active.test.tsx | 11 + .../domain-page-failover-active-active.tsx | 71 ++-- .../domain-page-failover-modal.test.tsx | 341 ++++++++++++++++++ .../domain-page-failover-modal.styles.ts | 77 ++++ .../domain-page-failover-modal.tsx | 93 +++++ .../domain-page-failover-modal.types.ts | 7 + 6 files changed, 569 insertions(+), 31 deletions(-) create mode 100644 src/views/domain-page/domain-page-failover-modal/__tests__/domain-page-failover-modal.test.tsx create mode 100644 src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.styles.ts create mode 100644 src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.tsx create mode 100644 src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.types.ts diff --git a/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx b/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx index 42691611a..fa08a60c8 100644 --- a/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx +++ b/src/views/domain-page/domain-page-failover-active-active/__tests__/domain-page-failover-active-active.test.tsx @@ -22,6 +22,10 @@ jest.mock( )) ); +jest.mock('../../domain-page-failover-modal/domain-page-failover-modal', () => + jest.fn(() =>
Modal
) +); + describe(DomainPageFailoverActiveActive.name, () => { beforeEach(() => { jest.clearAllMocks(); @@ -61,6 +65,7 @@ describe(DomainPageFailoverActiveActive.name, () => { ).toBeInTheDocument(); expect(screen.getByText('cluster-1 -> cluster-2')).toBeInTheDocument(); expect(screen.getByText('See more')).toBeInTheDocument(); + expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); it('renders cluster failover when matching non-primary cluster failover is found', () => { @@ -101,6 +106,7 @@ describe(DomainPageFailoverActiveActive.name, () => { ).toBeInTheDocument(); expect(screen.getByText('cluster-1 -> cluster-2')).toBeInTheDocument(); expect(screen.getByText('See more')).toBeInTheDocument(); + expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); it('does not render cluster failover section when clusterAttributeScope is set but clusterAttributeValue is undefined for non-primary scope', () => { @@ -138,6 +144,7 @@ describe(DomainPageFailoverActiveActive.name, () => { screen.queryByTestId('mock-single-cluster-failover') ).not.toBeInTheDocument(); expect(screen.getByText('See more')).toBeInTheDocument(); + expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); it('does not render cluster failover section when no matching cluster failover is found', () => { @@ -177,6 +184,7 @@ describe(DomainPageFailoverActiveActive.name, () => { screen.queryByTestId('mock-single-cluster-failover') ).not.toBeInTheDocument(); expect(screen.getByText('See more')).toBeInTheDocument(); + expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); it('does not render cluster failover section when clusterAttributeScope is undefined', () => { @@ -211,6 +219,7 @@ describe(DomainPageFailoverActiveActive.name, () => { screen.queryByTestId('mock-single-cluster-failover') ).not.toBeInTheDocument(); expect(screen.getByText('See more')).toBeInTheDocument(); + 
expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); it('renders "See more" button even when no matching cluster failover is found', () => { @@ -233,6 +242,7 @@ describe(DomainPageFailoverActiveActive.name, () => { screen.queryByTestId('mock-single-cluster-failover') ).not.toBeInTheDocument(); expect(screen.getByText('See more')).toBeInTheDocument(); + expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); it('selects the correct cluster failover when multiple cluster failovers exist', () => { @@ -286,6 +296,7 @@ describe(DomainPageFailoverActiveActive.name, () => { screen.getByTestId('mock-single-cluster-failover') ).toBeInTheDocument(); expect(screen.getByText('cluster-3 -> cluster-4')).toBeInTheDocument(); + expect(screen.getByTestId('mock-failover-modal')).toBeInTheDocument(); }); }); diff --git a/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx index c8d5ccc07..3cb3254d6 100644 --- a/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx +++ b/src/views/domain-page/domain-page-failover-active-active/domain-page-failover-active-active.tsx @@ -1,4 +1,4 @@ -import { useMemo } from 'react'; +import { useMemo, useState } from 'react'; import { Button } from 'baseui/button'; import { MdVisibility } from 'react-icons/md'; @@ -6,6 +6,7 @@ import { MdVisibility } from 'react-icons/md'; import usePageQueryParams from '@/hooks/use-page-query-params/use-page-query-params'; import domainPageQueryParamsConfig from '../config/domain-page-query-params.config'; +import DomainPageFailoverModal from '../domain-page-failover-modal/domain-page-failover-modal'; import DomainPageFailoverSingleCluster from '../domain-page-failover-single-cluster/domain-page-failover-single-cluster'; import { PRIMARY_CLUSTER_SCOPE } from 
'../domain-page-failovers/domain-page-failovers.constants'; import clusterFailoverMatchesAttribute from '../helpers/cluster-failover-matches-attribute'; @@ -16,6 +17,7 @@ import { type Props } from './domain-page-failover-active-active.types'; export default function DomainPageFailoverActiveActive({ failoverEvent, }: Props) { + const [isModalOpen, setIsModalOpen] = useState(false); const [{ clusterAttributeScope, clusterAttributeValue }] = usePageQueryParams( domainPageQueryParamsConfig ); @@ -42,35 +44,42 @@ export default function DomainPageFailoverActiveActive({ ]); return ( - - {clusterFailoverForMaybeSelectedAttribute && ( - - - {clusterAttributeScope === PRIMARY_CLUSTER_SCOPE - ? 'Primary:' - : `${clusterAttributeScope} (${clusterAttributeValue}):`} - - - - )} - - + <> + + {clusterFailoverForMaybeSelectedAttribute && ( + + + {clusterAttributeScope === PRIMARY_CLUSTER_SCOPE + ? 'Primary:' + : `${clusterAttributeScope} (${clusterAttributeValue}):`} + + + + )} + + + setIsModalOpen(false)} + /> + ); } diff --git a/src/views/domain-page/domain-page-failover-modal/__tests__/domain-page-failover-modal.test.tsx b/src/views/domain-page/domain-page-failover-modal/__tests__/domain-page-failover-modal.test.tsx new file mode 100644 index 000000000..4da6d7e41 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-modal/__tests__/domain-page-failover-modal.test.tsx @@ -0,0 +1,341 @@ +import { type ModalProps } from 'baseui/modal'; + +import { render, screen } from '@/test-utils/rtl'; + +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; + +import DomainPageFailoverModal from '../domain-page-failover-modal'; + +jest.mock('baseui/modal', () => ({ + ...jest.requireActual('baseui/modal'), + Modal: ({ isOpen, children }: ModalProps) => + isOpen ? ( +
+ {typeof children === 'function' ? children() : children} +
+ ) : null, +})); + +jest.mock('@/components/formatted-date/formatted-date', () => + jest.fn(({ timestampMs }: { timestampMs: number | null }) => ( +
+ {timestampMs ? new Date(timestampMs).toISOString() : 'No date'} +
+ )) +); + +jest.mock( + '../../domain-page-failover-single-cluster/domain-page-failover-single-cluster', + () => + jest.fn((props: { fromCluster?: string; toCluster?: string }) => + props.fromCluster && props.toCluster ? ( +
+ {`${props.fromCluster} -> ${props.toCluster}`} +
+ ) : null + ) +); + +describe(DomainPageFailoverModal.name, () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('renders modal when isOpen is true', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByRole('dialog')).toBeInTheDocument(); + }); + + it('does not render modal when isOpen is false', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: false }); + + expect(screen.queryByRole('dialog')).not.toBeInTheDocument(); + }); + + it('displays failover ID correctly', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-123', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('ID')).toBeInTheDocument(); + expect(screen.getByText('failover-123')).toBeInTheDocument(); + }); + + it('displays formatted time when createdTime is provided', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('Time')).toBeInTheDocument(); + expect(screen.getByTestId('formatted-date')).toBeInTheDocument(); + }); + + it('handles null createdTime', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: null, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('Time')).toBeInTheDocument(); + 
expect(screen.getByTestId('formatted-date')).toBeInTheDocument(); + }); + + it('renders table with cluster failovers when clusterFailovers array is not empty', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: null, + }, + ], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('Scope')).toBeInTheDocument(); + expect(screen.getByText('Attribute')).toBeInTheDocument(); + expect(screen.getByText('Clusters')).toBeInTheDocument(); + expect(screen.getByText('Primary')).toBeInTheDocument(); + expect(screen.getByText('-')).toBeInTheDocument(); + expect( + screen.getByTestId('mock-single-cluster-failover') + ).toBeInTheDocument(); + }); + + it('does not render table when clusterFailovers array is empty', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.queryByText('Scope')).not.toBeInTheDocument(); + expect(screen.queryByText('Attribute')).not.toBeInTheDocument(); + expect(screen.queryByText('Clusters')).not.toBeInTheDocument(); + }); + + it('displays Primary scope and dash for attribute when clusterAttribute is null', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: null, + }, + ], + }; + + setup({ 
failoverEvent, isOpen: true }); + + expect(screen.getByText('Primary')).toBeInTheDocument(); + expect(screen.getByText('-')).toBeInTheDocument(); + }); + + it('displays scope and attribute name when clusterAttribute is provided', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: { + scope: 'city', + name: 'new_york', + }, + }, + ], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('city')).toBeInTheDocument(); + expect(screen.getByText('new_york')).toBeInTheDocument(); + }); + + it('renders multiple cluster failovers correctly', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [ + { + fromCluster: { + activeClusterName: 'cluster-1', + failoverVersion: '1', + }, + toCluster: { + activeClusterName: 'cluster-2', + failoverVersion: '2', + }, + clusterAttribute: null, + }, + { + fromCluster: { + activeClusterName: 'cluster-3', + failoverVersion: '3', + }, + toCluster: { + activeClusterName: 'cluster-4', + failoverVersion: '4', + }, + clusterAttribute: { + scope: 'region', + name: 'us-east', + }, + }, + ], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('Primary')).toBeInTheDocument(); + expect(screen.getByText('region')).toBeInTheDocument(); + expect(screen.getByText('us-east')).toBeInTheDocument(); + const clusterComponents = screen.getAllByTestId( + 'mock-single-cluster-failover' + ); + expect(clusterComponents).toHaveLength(2); + expect(clusterComponents[0]).toHaveTextContent('cluster-1 -> cluster-2'); + 
expect(clusterComponents[1]).toHaveTextContent('cluster-3 -> cluster-4'); + }); + + it('calls onClose when Close button is clicked', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + const mockOnClose = jest.fn(); + setup({ failoverEvent, isOpen: true, onClose: mockOnClose }); + + const closeButton = screen.getByText('Close'); + closeButton.click(); + + expect(mockOnClose).toHaveBeenCalledTimes(1); + }); + + it('displays modal title correctly', () => { + const failoverEvent: FailoverEvent = { + id: 'failover-1', + createdTime: { + seconds: '1700000000', + nanos: 0, + }, + failoverType: 'FAILOVER_TYPE_GRACEFUL', + clusterFailovers: [], + }; + + setup({ failoverEvent, isOpen: true }); + + expect(screen.getByText('Failover Information')).toBeInTheDocument(); + }); +}); + +function setup({ + failoverEvent, + isOpen = true, + onClose = jest.fn(), +}: { + failoverEvent: FailoverEvent; + isOpen?: boolean; + onClose?: () => void; +}) { + render( + + ); +} diff --git a/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.styles.ts b/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.styles.ts new file mode 100644 index 000000000..27a448f53 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.styles.ts @@ -0,0 +1,77 @@ +import { styled as createStyled, withStyle } from 'baseui'; +import { ModalFooter, ModalHeader, type ModalOverrides } from 'baseui/modal'; +import { type TableOverrides } from 'baseui/table-semantic'; +import { type StyleObject } from 'styletron-react'; + +export const overrides = { + modal: { + Close: { + style: ({ $theme }): StyleObject => ({ + top: $theme.sizing.scale850, + right: $theme.sizing.scale800, + }), + }, + Dialog: { + style: (): StyleObject => ({ + width: '700px', + }), + }, + } satisfies ModalOverrides, + 
table: { + TableHeadCell: { + style: ({ $theme }): StyleObject => ({ + ...$theme.typography.LabelXSmall, + paddingTop: $theme.sizing.scale300, + paddingBottom: $theme.sizing.scale300, + paddingLeft: $theme.sizing.scale500, + paddingRight: $theme.sizing.scale500, + color: $theme.colors.contentTertiary, + }), + }, + TableBodyCell: { + style: ({ $theme }): StyleObject => ({ + ...$theme.typography.ParagraphXSmall, + paddingTop: $theme.sizing.scale300, + paddingBottom: $theme.sizing.scale300, + paddingLeft: $theme.sizing.scale500, + paddingRight: $theme.sizing.scale500, + }), + }, + } satisfies TableOverrides, +}; + +export const styled = { + ModalHeader: withStyle(ModalHeader, ({ $theme }) => ({ + marginTop: $theme.sizing.scale850, + })), + InfoRow: createStyled('div', ({ $theme }) => ({ + display: 'flex', + gap: $theme.sizing.scale800, + marginBottom: 0, + paddingLeft: $theme.sizing.scale500, + paddingRight: $theme.sizing.scale500, + })), + InfoItem: createStyled('div', ({ $theme }) => ({ + display: 'flex', + flexDirection: 'column', + gap: $theme.sizing.scale200, + })), + InfoLabel: createStyled('div', ({ $theme }) => ({ + ...$theme.typography.LabelXSmall, + color: $theme.colors.contentTertiary, + })), + InfoValue: createStyled('div', ({ $theme }) => ({ + ...$theme.typography.LabelXSmall, + color: $theme.colors.contentPrimary, + })), + TableContainer: createStyled('div', ({ $theme }) => ({ + marginTop: $theme.sizing.scale600, + maxHeight: '50vh', + overflowY: 'auto', + })), + ModalFooter: withStyle(ModalFooter, { + display: 'flex', + flexDirection: 'row-reverse', + justifyContent: 'space-between', + }), +}; diff --git a/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.tsx b/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.tsx new file mode 100644 index 000000000..675cd9d45 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.tsx @@ -0,0 +1,93 @@ +'use client'; +import { 
useMemo } from 'react'; + +import { Modal, ModalBody, ModalButton } from 'baseui/modal'; +import { Table } from 'baseui/table-semantic'; + +import FormattedDate from '@/components/formatted-date/formatted-date'; +import parseGrpcTimestamp from '@/utils/datetime/parse-grpc-timestamp'; + +import DomainPageFailoverSingleCluster from '../domain-page-failover-single-cluster/domain-page-failover-single-cluster'; + +import { overrides, styled } from './domain-page-failover-modal.styles'; +import { type Props } from './domain-page-failover-modal.types'; + +export default function DomainPageFailoverModal({ + failoverEvent, + isOpen, + onClose, +}: Props) { + const tableRows = useMemo(() => { + return failoverEvent.clusterFailovers.map((clusterFailover) => { + const fromCluster = clusterFailover.fromCluster?.activeClusterName; + const toCluster = clusterFailover.toCluster?.activeClusterName; + const clusters = ( + + ); + + const attribute = clusterFailover.clusterAttribute; + if (attribute === null) { + return { + scope: 'Primary', + attribute: '-', + clusters, + }; + } + + return { + scope: attribute.scope, + attribute: attribute.name, + clusters, + }; + }); + }, [failoverEvent.clusterFailovers]); + + const formattedTime = useMemo(() => { + if (!failoverEvent.createdTime) return null; + return parseGrpcTimestamp(failoverEvent.createdTime); + }, [failoverEvent.createdTime]); + + return ( + + Failover Information + + + + ID + {failoverEvent.id} + + + Time + + + + + + {tableRows.length > 0 && ( + +
+ + )} + + + + Close + + + + ); +} diff --git a/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.types.ts b/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.types.ts new file mode 100644 index 000000000..ca4eadf57 --- /dev/null +++ b/src/views/domain-page/domain-page-failover-modal/domain-page-failover-modal.types.ts @@ -0,0 +1,7 @@ +import { type FailoverEvent } from '@/route-handlers/list-failover-history/list-failover-history.types'; + +export type Props = { + failoverEvent: FailoverEvent; + isOpen: boolean; + onClose: () => void; +}; From 4e27d595f5a3e99ae673c01de0c88e42b0679cee Mon Sep 17 00:00:00 2001 From: Adhitya Mamallan Date: Tue, 18 Nov 2025 15:07:05 +0100 Subject: [PATCH 43/58] feat: New components for Workflow History V2 (#1082) * Add new Workflow History V2 root component, which (for now) renders only a placeholder header component and placeholder tables. * Implement Workflow History Header with basic UI and sticky functionality * Add logic in WorkflowHistoryWrapper to render V2 UI based on feature flag Signed-off-by: Adhitya Mamallan --- .../__tests__/workflow-history-v2.test.tsx | 192 ++++++++++++++ .../workflow-history-header.test.tsx | 239 ++++++++++++++++++ .../workflow-history-header.styles.ts | 102 ++++++++ .../workflow-history-header.tsx | 84 ++++++ .../workflow-history-header.types.ts | 26 ++ .../workflow-history-v2.styles.ts | 20 ++ .../workflow-history-v2.tsx | 97 +++++++ .../workflow-history-v2.types.ts | 3 + .../workflow-history-wrapper.test.tsx | 73 ++++++ .../workflow-history-wrapper.tsx | 13 +- 10 files changed, 848 insertions(+), 1 deletion(-) create mode 100644 src/views/workflow-history-v2/__tests__/workflow-history-v2.test.tsx create mode 100644 src/views/workflow-history-v2/workflow-history-header/__tests__/workflow-history-header.test.tsx create mode 100644 src/views/workflow-history-v2/workflow-history-header/workflow-history-header.styles.ts create mode 100644 
src/views/workflow-history-v2/workflow-history-header/workflow-history-header.tsx create mode 100644 src/views/workflow-history-v2/workflow-history-header/workflow-history-header.types.ts create mode 100644 src/views/workflow-history-v2/workflow-history-v2.styles.ts create mode 100644 src/views/workflow-history-v2/workflow-history-v2.tsx create mode 100644 src/views/workflow-history-v2/workflow-history-v2.types.ts create mode 100644 src/views/workflow-history/workflow-history-wrapper/__tests__/workflow-history-wrapper.test.tsx diff --git a/src/views/workflow-history-v2/__tests__/workflow-history-v2.test.tsx b/src/views/workflow-history-v2/__tests__/workflow-history-v2.test.tsx new file mode 100644 index 000000000..16e0dac9e --- /dev/null +++ b/src/views/workflow-history-v2/__tests__/workflow-history-v2.test.tsx @@ -0,0 +1,192 @@ +import React from 'react'; + +import { render, screen, userEvent } from '@/test-utils/rtl'; + +import * as usePageFiltersModule from '@/components/page-filters/hooks/use-page-filters'; +import { type WorkflowPageParams } from '@/views/workflow-page/workflow-page.types'; + +import { WorkflowHistoryContext } from '../../workflow-history/workflow-history-context-provider/workflow-history-context-provider'; +import WorkflowHistoryV2 from '../workflow-history-v2'; + +jest.mock('@/components/page-filters/hooks/use-page-filters', () => + jest.fn().mockReturnValue({}) +); + +jest.mock('../workflow-history-header/workflow-history-header', () => + jest.fn(({ isUngroupedHistoryViewEnabled, onClickGroupModeToggle }) => ( +
+
+ {String(isUngroupedHistoryViewEnabled)} +
+ +
+ )) +); + +jest.mock( + '../workflow-history-grouped-table/workflow-history-grouped-table', + () => + jest.fn(() => ( +
Grouped Table
+ )) +); + +jest.mock('@/utils/decode-url-params', () => jest.fn((params) => params)); + +const mockSetQueryParams = jest.fn(); +const mockResetAllFilters = jest.fn(); +const mockSetUngroupedViewUserPreference = jest.fn(); + +describe(WorkflowHistoryV2.name, () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should render WorkflowHistoryHeader', () => { + setup(); + expect(screen.getByTestId('workflow-history-header')).toBeInTheDocument(); + }); + + it('should render grouped table by default when ungroupedHistoryViewEnabled is not set and user preference is false', () => { + setup({ ungroupedViewUserPreference: false }); + expect( + screen.getByTestId('workflow-history-grouped-table') + ).toBeInTheDocument(); + expect(screen.queryByText('WIP: ungrouped table')).not.toBeInTheDocument(); + }); + + it('should render grouped table by default when ungroupedHistoryViewEnabled is not set and user preference is null', () => { + setup({ ungroupedViewUserPreference: null }); + expect( + screen.getByTestId('workflow-history-grouped-table') + ).toBeInTheDocument(); + expect(screen.queryByText('WIP: ungrouped table')).not.toBeInTheDocument(); + }); + + it('should render ungrouped table when ungroupedHistoryViewEnabled query param is true', () => { + setup({ + queryParams: { + ungroupedHistoryViewEnabled: true, + }, + }); + expect(screen.getByText('WIP: ungrouped table')).toBeInTheDocument(); + expect( + screen.queryByTestId('workflow-history-grouped-table') + ).not.toBeInTheDocument(); + }); + + it('should render grouped table when ungroupedHistoryViewEnabled query param is false', () => { + setup({ + queryParams: { + ungroupedHistoryViewEnabled: false, + }, + }); + expect( + screen.getByTestId('workflow-history-grouped-table') + ).toBeInTheDocument(); + expect(screen.queryByText('WIP: ungrouped table')).not.toBeInTheDocument(); + }); + + it('should render ungrouped table when user preference is true and query param is not set', () => { + setup({ 
ungroupedViewUserPreference: true }); + expect(screen.getByText('WIP: ungrouped table')).toBeInTheDocument(); + expect( + screen.queryByTestId('workflow-history-grouped-table') + ).not.toBeInTheDocument(); + }); + + it('should call setUngroupedViewUserPreference and setQueryParams when toggle is clicked from grouped to ungrouped', async () => { + const { user } = setup({ + queryParams: { + ungroupedHistoryViewEnabled: false, + }, + }); + + const toggleButton = screen.getByTestId('toggle-group-mode'); + await user.click(toggleButton); + + expect(mockSetUngroupedViewUserPreference).toHaveBeenCalledWith(true); + expect(mockSetQueryParams).toHaveBeenCalledWith({ + ungroupedHistoryViewEnabled: 'true', + }); + }); + + it('should call setUngroupedViewUserPreference and setQueryParams when toggle is clicked from ungrouped to grouped', async () => { + const { user } = setup({ + queryParams: { + ungroupedHistoryViewEnabled: true, + }, + }); + + const toggleButton = screen.getByTestId('toggle-group-mode'); + await user.click(toggleButton); + + expect(mockSetUngroupedViewUserPreference).toHaveBeenCalledWith(false); + expect(mockSetQueryParams).toHaveBeenCalledWith({ + ungroupedHistoryViewEnabled: 'false', + }); + }); +}); + +function setup({ + params = { + cluster: 'test-cluster', + domain: 'test-domain', + workflowId: 'test-workflow-id', + runId: 'test-run-id', + }, + queryParams = { + historyEventTypes: undefined, + historyEventStatuses: undefined, + historySelectedEventId: undefined, + ungroupedHistoryViewEnabled: undefined, + }, + ungroupedViewUserPreference = false, +}: { + params?: WorkflowPageParams; + queryParams?: { + historyEventTypes?: unknown; + historyEventStatuses?: unknown; + historySelectedEventId?: unknown; + ungroupedHistoryViewEnabled?: boolean; + activeFiltersCount?: number; + }; + ungroupedViewUserPreference?: boolean | null; +} = {}) { + const user = userEvent.setup(); + + jest.spyOn(usePageFiltersModule, 'default').mockReturnValue({ + resetAllFilters: 
mockResetAllFilters, + activeFiltersCount: queryParams.activeFiltersCount ?? 0, + queryParams: { + historyEventTypes: queryParams.historyEventTypes, + historyEventStatuses: queryParams.historyEventStatuses, + historySelectedEventId: queryParams.historySelectedEventId, + ungroupedHistoryViewEnabled: queryParams.ungroupedHistoryViewEnabled, + }, + setQueryParams: mockSetQueryParams, + }); + + const mockContextValue = { + ungroupedViewUserPreference, + setUngroupedViewUserPreference: mockSetUngroupedViewUserPreference, + }; + + const renderResult = render( + + + + ); + + return { + user, + ...renderResult, + }; +} diff --git a/src/views/workflow-history-v2/workflow-history-header/__tests__/workflow-history-header.test.tsx b/src/views/workflow-history-v2/workflow-history-header/__tests__/workflow-history-header.test.tsx new file mode 100644 index 000000000..b190babf7 --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-header/__tests__/workflow-history-header.test.tsx @@ -0,0 +1,239 @@ +import React from 'react'; + +import { + mockIsIntersecting, + intersectionMockInstance, +} from 'react-intersection-observer/test-utils'; + +import { act, render, screen, userEvent } from '@/test-utils/rtl'; + +import WorkflowHistoryHeader from '../workflow-history-header'; +import { type Props } from '../workflow-history-header.types'; + +jest.mock( + '@/views/workflow-history/workflow-history-export-json-button/workflow-history-export-json-button', + () => jest.fn(() => ) +); + +jest.mock( + '@/components/page-filters/page-filters-toggle/page-filters-toggle', + () => + jest.fn(({ onClick, isActive, activeFiltersCount }) => ( + + )) +); + +describe(WorkflowHistoryHeader.name, () => { + it('should render the header with title', () => { + setup(); + expect(screen.getByText('Workflow history')).toBeInTheDocument(); + }); + + it('should render export JSON button', () => { + setup(); + expect(screen.getByText('Export JSON')).toBeInTheDocument(); + }); + + it('should render 
segmented control with grouped and ungrouped segments', () => { + setup(); + expect(screen.getByText('Grouped')).toBeInTheDocument(); + expect(screen.getByText('Ungrouped')).toBeInTheDocument(); + }); + + it('should call onClickGroupModeToggle when segmented control segment is clicked', async () => { + const { user, mockOnClickGroupModeToggle } = setup(); + const groupedSegment = screen.getByText('Grouped'); + + await user.click(groupedSegment); + + expect(mockOnClickGroupModeToggle).toHaveBeenCalledTimes(1); + }); + + it('should render filters toggle button', () => { + setup(); + expect(screen.getByTestId('page-filters-toggle')).toBeInTheDocument(); + }); + + it('should show filters toggle as inactive by default', () => { + setup(); + const filtersToggle = screen.getByTestId('page-filters-toggle'); + expect(filtersToggle).toHaveTextContent('Show Filters (0)'); + }); + + it('should toggle filters visibility when filter toggle is clicked', async () => { + const { user } = setup(); + const filtersToggle = screen.getByTestId('page-filters-toggle'); + + expect(filtersToggle).toHaveTextContent('Show Filters (0)'); + + await user.click(filtersToggle); + + expect(filtersToggle).toHaveTextContent('Hide Filters (0)'); + + await user.click(filtersToggle); + + expect(filtersToggle).toHaveTextContent('Show Filters (0)'); + }); + + it('should display active filters count in filters toggle', () => { + setup({ + pageFiltersProps: { + activeFiltersCount: 3, + queryParams: { + historyEventTypes: undefined, + historyEventStatuses: undefined, + historySelectedEventId: undefined, + ungroupedHistoryViewEnabled: undefined, + }, + setQueryParams: jest.fn(), + resetAllFilters: jest.fn(), + }, + }); + const filtersToggle = screen.getByTestId('page-filters-toggle'); + expect(filtersToggle).toHaveTextContent('Show Filters (3)'); + }); + + it('should render sentinel when sticky is enabled', () => { + setup({ isStickyEnabled: true }); + 
expect(screen.getByTestId('sentinel')).toBeInTheDocument(); + }); + + it('should not render sentinel when sticky is disabled', () => { + setup({ isStickyEnabled: false }); + expect(screen.queryByTestId('sentinel')).not.toBeInTheDocument(); + }); + + it('should set up intersection observer when sticky is enabled', () => { + setup({ isStickyEnabled: true }); + + const sentinel = screen.getByTestId('sentinel'); + const instance = intersectionMockInstance(sentinel); + expect(instance.observe).toHaveBeenCalledWith(sentinel); + }); + + it('should not set up intersection observer when sticky is disabled', () => { + setup({ isStickyEnabled: false }); + + const sentinel = screen.queryByTestId('sentinel'); + expect(sentinel).not.toBeInTheDocument(); + }); + + it('should set sticky state to false when sentinel is in view', () => { + setup({ isStickyEnabled: true }); + + const wrapper = screen.getByTestId('workflow-history-header-wrapper'); + const sentinel = screen.getByTestId('sentinel'); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'false'); + + act(() => { + mockIsIntersecting(sentinel, 1); + }); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'false'); + }); + + it('should set sticky state to true when sentinel is out of view', () => { + setup({ isStickyEnabled: true }); + + const wrapper = screen.getByTestId('workflow-history-header-wrapper'); + const sentinel = screen.getByTestId('sentinel'); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'false'); + + act(() => { + mockIsIntersecting(sentinel, 0); + }); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'true'); + }); + + it('should toggle sticky state when sentinel visibility changes', () => { + setup({ isStickyEnabled: true }); + + const wrapper = screen.getByTestId('workflow-history-header-wrapper'); + const sentinel = screen.getByTestId('sentinel'); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'false'); + + act(() => { + mockIsIntersecting(sentinel, 0); + }); + + 
expect(wrapper).toHaveAttribute('data-is-sticky', 'true'); + + act(() => { + mockIsIntersecting(sentinel, 1); + }); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'false'); + }); + + it('should disable sticky when isStickyEnabled changes to false', () => { + const { rerender } = setup({ isStickyEnabled: true }); + + const wrapper = screen.getByTestId('workflow-history-header-wrapper'); + const sentinel = screen.getByTestId('sentinel'); + + act(() => { + mockIsIntersecting(sentinel, 0); + }); + + expect(wrapper).toHaveAttribute('data-is-sticky', 'true'); + + rerender( + + ); + + expect(screen.queryByTestId('sentinel')).not.toBeInTheDocument(); + const newWrapper = screen.getByTestId('workflow-history-header-wrapper'); + expect(newWrapper).toHaveAttribute('data-is-sticky', 'false'); + }); +}); + +function setup(props: Partial = {}) { + const user = userEvent.setup(); + const mockOnClickGroupModeToggle = jest.fn(); + + const defaultProps = getDefaultProps(); + const mergedProps = { + ...defaultProps, + onClickGroupModeToggle: mockOnClickGroupModeToggle, + ...props, + }; + + const renderResult = render(); + + return { + user, + mockOnClickGroupModeToggle, + ...renderResult, + }; +} + +function getDefaultProps(): Props { + return { + isUngroupedHistoryViewEnabled: false, + onClickGroupModeToggle: jest.fn(), + wfHistoryRequestArgs: { + domain: 'test-domain', + cluster: 'test-cluster', + workflowId: 'test-workflowId', + runId: 'test-runId', + }, + pageFiltersProps: { + activeFiltersCount: 0, + queryParams: { + historyEventTypes: undefined, + historyEventStatuses: undefined, + historySelectedEventId: undefined, + ungroupedHistoryViewEnabled: undefined, + }, + setQueryParams: jest.fn(), + resetAllFilters: jest.fn(), + }, + isStickyEnabled: true, + }; +} diff --git a/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.styles.ts b/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.styles.ts new file mode 100644 index 
000000000..daf52945f --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.styles.ts @@ -0,0 +1,102 @@ +import { styled as createStyled, type Theme } from 'baseui'; +import { + type SegmentOverrides, + type SegmentedControlOverrides, +} from 'baseui/segmented-control'; +import { type StyleObject } from 'styletron-react'; + +export const styled = { + Sentinel: createStyled('div', { + height: '1px', + visibility: 'hidden', + }), + Container: createStyled< + 'div', + { $isSticky?: boolean; $isStickyEnabled?: boolean } + >( + 'div', + ({ $theme, $isSticky, $isStickyEnabled }): StyleObject => ({ + paddingTop: $theme.sizing.scale600, + paddingBottom: $theme.sizing.scale600, + marginTop: `-${$theme.sizing.scale600}`, + backgroundColor: $theme.colors.backgroundPrimary, + transition: 'box-shadow 0.2s ease-in-out', + // Non-sticky by default or when disabled + position: 'static', + boxShadow: 'none', + // Sticky only on medium screens and up when enabled + ...($isStickyEnabled && { + [$theme.mediaQuery.medium]: { + position: 'sticky', + top: 0, + boxShadow: $isSticky ? 
$theme.lighting.shallowBelow : 'none', + zIndex: 1, + }, + }), + }) + ), + Header: createStyled( + 'div', + ({ $theme }: { $theme: Theme }): StyleObject => ({ + display: 'flex', + flexDirection: 'column', + justifyContent: 'space-between', + flexWrap: 'wrap', + gap: $theme.sizing.scale500, + [$theme.mediaQuery.medium]: { + alignItems: 'center', + flexDirection: 'row', + }, + }) + ), + Actions: createStyled( + 'div', + ({ $theme }: { $theme: Theme }): StyleObject => ({ + display: 'flex', + flexDirection: 'column', + flexWrap: 'wrap', + gap: $theme.sizing.scale500, + [$theme.mediaQuery.medium]: { + flexDirection: 'row', + }, + }) + ), +}; + +export const overrides = { + groupToggle: { + Root: { + style: ({ $theme }: { $theme: Theme }): StyleObject => ({ + height: $theme.sizing.scale950, + padding: $theme.sizing.scale0, + borderRadius: $theme.borders.radius300, + ...$theme.typography.ParagraphSmall, + width: 'auto', + flexGrow: 1, + [$theme.mediaQuery.medium]: { + flexGrow: 0, + }, + }), + }, + SegmentList: { + style: ({ $theme }: { $theme: Theme }): StyleObject => ({ + height: $theme.sizing.scale950, + ...$theme.typography.ParagraphSmall, + }), + }, + Active: { + style: ({ $theme }: { $theme: Theme }): StyleObject => ({ + height: $theme.sizing.scale900, + top: 0, + }), + }, + } satisfies SegmentedControlOverrides, + groupToggleSegment: { + Segment: { + style: ({ $theme }: { $theme: Theme }): StyleObject => ({ + height: $theme.sizing.scale900, + whiteSpace: 'nowrap', + }), + }, + } satisfies SegmentOverrides, +}; diff --git a/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.tsx b/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.tsx new file mode 100644 index 000000000..1f53dcd9a --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.tsx @@ -0,0 +1,84 @@ +import React, { useEffect, useState } from 'react'; + +import { SegmentedControl, Segment } from 
'baseui/segmented-control'; +import { HeadingXSmall } from 'baseui/typography'; +import { useInView } from 'react-intersection-observer'; + +import PageFiltersToggle from '@/components/page-filters/page-filters-toggle/page-filters-toggle'; +import PageSection from '@/components/page-section/page-section'; +import WorkflowHistoryExportJsonButton from '@/views/workflow-history/workflow-history-export-json-button/workflow-history-export-json-button'; + +import { overrides, styled } from './workflow-history-header.styles'; +import { type Props } from './workflow-history-header.types'; + +export default function WorkflowHistoryHeader({ + isUngroupedHistoryViewEnabled, + onClickGroupModeToggle, + wfHistoryRequestArgs, + pageFiltersProps, + isStickyEnabled = true, +}: Props) { + const [areFiltersShown, setAreFiltersShown] = useState(false); + + const [isSticky, setIsSticky] = useState(false); + useEffect(() => { + if (!isStickyEnabled && isSticky) setIsSticky(false); + }, [isStickyEnabled, isSticky]); + + const { ref: sentinelRef } = useInView({ + threshold: 1, + onChange: (inView) => { + setIsSticky(!inView); + }, + skip: !isStickyEnabled, + }); + + const { activeFiltersCount } = pageFiltersProps; + + return ( + <> + {isStickyEnabled && ( + + )} + + + + Workflow history + + + onClickGroupModeToggle()} + overrides={overrides.groupToggle} + > + + + + setAreFiltersShown((v) => !v)} + isActive={areFiltersShown} + /> + + + + + + ); +} diff --git a/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.types.ts b/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.types.ts new file mode 100644 index 000000000..032615891 --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-header/workflow-history-header.types.ts @@ -0,0 +1,26 @@ +import { + type PageQueryParamSetter, + type PageQueryParamValues, +} from '@/hooks/use-page-query-params/use-page-query-params.types'; +import { type Props as 
WorkflowHistoryExportJsonButtonProps } from '@/views/workflow-history/workflow-history-export-json-button/workflow-history-export-json-button.types'; + +import type workflowPageQueryParamsConfig from '../../workflow-page/config/workflow-page-query-params.config'; + +type WorkflowPageQueryParamsConfig = typeof workflowPageQueryParamsConfig; + +export type WorkflowHistoryGroupMode = 'grouped' | 'ungrouped'; + +type PageFiltersProps = { + resetAllFilters: () => void; + activeFiltersCount: number; + queryParams: PageQueryParamValues; + setQueryParams: PageQueryParamSetter; +}; + +export type Props = { + isUngroupedHistoryViewEnabled: boolean; + onClickGroupModeToggle: () => void; + wfHistoryRequestArgs: WorkflowHistoryExportJsonButtonProps; + pageFiltersProps: PageFiltersProps; + isStickyEnabled?: boolean; +}; diff --git a/src/views/workflow-history-v2/workflow-history-v2.styles.ts b/src/views/workflow-history-v2/workflow-history-v2.styles.ts new file mode 100644 index 000000000..ecdc60e45 --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-v2.styles.ts @@ -0,0 +1,20 @@ +import { styled as createStyled, withStyle } from 'baseui'; + +import PageSection from '@/components/page-section/page-section'; + +export const styled = { + Container: createStyled('div', { + display: 'flex', + flexDirection: 'column', + flex: 1, + // This ensures the header section's z-index is relative to this container and does not + // appear above external elements like popovers and modals. 
+ position: 'relative', + zIndex: 0, + }), + ContentSection: withStyle(PageSection, { + display: 'flex', + flexDirection: 'column', + flex: 1, + }), +}; diff --git a/src/views/workflow-history-v2/workflow-history-v2.tsx b/src/views/workflow-history-v2/workflow-history-v2.tsx new file mode 100644 index 000000000..6f33a88db --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-v2.tsx @@ -0,0 +1,97 @@ +import { useCallback, useContext, useMemo } from 'react'; + +import usePageFilters from '@/components/page-filters/hooks/use-page-filters'; +import decodeUrlParams from '@/utils/decode-url-params'; + +import workflowHistoryFiltersConfig from '../workflow-history/config/workflow-history-filters.config'; +import WORKFLOW_HISTORY_PAGE_SIZE_CONFIG from '../workflow-history/config/workflow-history-page-size.config'; +import { WorkflowHistoryContext } from '../workflow-history/workflow-history-context-provider/workflow-history-context-provider'; +import workflowPageQueryParamsConfig from '../workflow-page/config/workflow-page-query-params.config'; +import { type WorkflowPageTabContentParams } from '../workflow-page/workflow-page-tab-content/workflow-page-tab-content.types'; + +import WorkflowHistoryGroupedTable from './workflow-history-grouped-table/workflow-history-grouped-table'; +import WorkflowHistoryHeader from './workflow-history-header/workflow-history-header'; +import { styled } from './workflow-history-v2.styles'; +import { type Props } from './workflow-history-v2.types'; + +export default function WorkflowHistoryV2({ params }: Props) { + const decodedParams = decodeUrlParams(params); + + const { workflowTab, ...historyQueryParams } = decodedParams; + const wfHistoryRequestArgs = { + ...historyQueryParams, + pageSize: WORKFLOW_HISTORY_PAGE_SIZE_CONFIG, + waitForNewEvent: true, + }; + + const { + activeFiltersCount, + queryParams, + setQueryParams, + ...pageFiltersRest + } = usePageFilters({ + pageQueryParamsConfig: workflowPageQueryParamsConfig, + 
pageFiltersConfig: workflowHistoryFiltersConfig, + }); + + const { ungroupedViewUserPreference, setUngroupedViewUserPreference } = + useContext(WorkflowHistoryContext); + + const isUngroupedHistoryViewEnabled = useMemo(() => { + if (queryParams.ungroupedHistoryViewEnabled !== undefined) + return queryParams.ungroupedHistoryViewEnabled; + + return ungroupedViewUserPreference ?? false; + }, [queryParams.ungroupedHistoryViewEnabled, ungroupedViewUserPreference]); + + const onClickGroupModeToggle = useCallback(() => { + setUngroupedViewUserPreference(!isUngroupedHistoryViewEnabled); + + setQueryParams({ + ungroupedHistoryViewEnabled: isUngroupedHistoryViewEnabled + ? 'false' + : 'true', + }); + + // TODO: set timeline list visible range depending on what was visible before, + // once the grouped and ungrouped tables have been fully implemented. + + // History V1 code below for reference + + // setTimelineListVisibleRange(() => ({ + // startIndex: -1, + // endIndex: -1, + // compactStartIndex: -1, + // compactEndIndex: -1, + // ungroupedStartIndex: -1, + // ungroupedEndIndex: -1, + // })); + }, [ + isUngroupedHistoryViewEnabled, + setQueryParams, + setUngroupedViewUserPreference, + ]); + + return ( + + + + {isUngroupedHistoryViewEnabled ? ( +
WIP: ungrouped table
+ ) : ( + + )} +
+
+ ); +} diff --git a/src/views/workflow-history-v2/workflow-history-v2.types.ts b/src/views/workflow-history-v2/workflow-history-v2.types.ts new file mode 100644 index 000000000..ceb7f53d1 --- /dev/null +++ b/src/views/workflow-history-v2/workflow-history-v2.types.ts @@ -0,0 +1,3 @@ +import { type WorkflowPageTabContentProps } from '../workflow-page/workflow-page-tab-content/workflow-page-tab-content.types'; + +export type Props = WorkflowPageTabContentProps; diff --git a/src/views/workflow-history/workflow-history-wrapper/__tests__/workflow-history-wrapper.test.tsx b/src/views/workflow-history/workflow-history-wrapper/__tests__/workflow-history-wrapper.test.tsx new file mode 100644 index 000000000..b67936f75 --- /dev/null +++ b/src/views/workflow-history/workflow-history-wrapper/__tests__/workflow-history-wrapper.test.tsx @@ -0,0 +1,73 @@ +import React, { Suspense } from 'react'; + +import { HttpResponse } from 'msw'; + +import { render, screen, waitForElementToBeRemoved } from '@/test-utils/rtl'; + +import { type GetConfigResponse } from '@/route-handlers/get-config/get-config.types'; + +import { type Props } from '../../workflow-history.types'; +import WorkflowHistoryWrapper from '../workflow-history-wrapper'; + +jest.mock('../../../workflow-history-v2/workflow-history-v2', () => + jest.fn(() => ( +
Workflow History V2
+ )) +); + +jest.mock('../../workflow-history', () => + jest.fn(() =>
Workflow History V1
) +); + +describe(WorkflowHistoryWrapper.name, () => { + it('should render WorkflowHistoryV2 when HISTORY_PAGE_V2_ENABLED is true', async () => { + await setup({ isHistoryPageV2Enabled: true }); + + expect(screen.getByTestId('workflow-history-v2')).toBeInTheDocument(); + expect(screen.queryByTestId('workflow-history')).not.toBeInTheDocument(); + }); + + it('should render WorkflowHistory when HISTORY_PAGE_V2_ENABLED is false', async () => { + await setup({ isHistoryPageV2Enabled: false }); + + expect(screen.getByTestId('workflow-history')).toBeInTheDocument(); + expect(screen.queryByTestId('workflow-history-v2')).not.toBeInTheDocument(); + }); +}); + +async function setup({ + isHistoryPageV2Enabled = false, + props = { + params: { + cluster: 'test-cluster', + domain: 'test-domain', + workflowId: 'test-workflow-id', + runId: 'test-run-id', + workflowTab: 'history' as const, + }, + }, +}: { + isHistoryPageV2Enabled?: boolean; + props?: Props; +} = {}) { + render( + Loading...}> + + , + { + endpointsMocks: [ + { + path: '/api/config', + httpMethod: 'GET', + mockOnce: false, + httpResolver: async () => + HttpResponse.json( + isHistoryPageV2Enabled satisfies GetConfigResponse<'HISTORY_PAGE_V2_ENABLED'> + ), + }, + ], + } + ); + + await waitForElementToBeRemoved(() => screen.queryAllByText('Loading...')); +} diff --git a/src/views/workflow-history/workflow-history-wrapper/workflow-history-wrapper.tsx b/src/views/workflow-history/workflow-history-wrapper/workflow-history-wrapper.tsx index fc47b6e7e..780439b53 100644 --- a/src/views/workflow-history/workflow-history-wrapper/workflow-history-wrapper.tsx +++ b/src/views/workflow-history/workflow-history-wrapper/workflow-history-wrapper.tsx @@ -1,11 +1,22 @@ +import useSuspenseConfigValue from '@/hooks/use-config-value/use-suspense-config-value'; +import WorkflowHistoryV2 from '@/views/workflow-history-v2/workflow-history-v2'; + import WorkflowHistory from '../workflow-history'; import WorkflowHistoryContextProvider from 
'../workflow-history-context-provider/workflow-history-context-provider'; import { type Props } from '../workflow-history.types'; export default function WorkflowHistoryWrapper(props: Props) { + const { data: isHistoryPageV2Enabled } = useSuspenseConfigValue( + 'HISTORY_PAGE_V2_ENABLED' + ); + return ( - + {isHistoryPageV2Enabled ? ( + + ) : ( + + )} ); } From d994f9d6e44e1f9e19f8240dcedd386e4bd8f66f Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 01:05:20 +0000 Subject: [PATCH 44/58] update test cases --- .../use-workflow-history-grouper.test.tsx | 515 +++++++----------- .../hooks/use-workflow-history-grouper.ts | 15 +- 2 files changed, 196 insertions(+), 334 deletions(-) diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx index 6a2ace345..a10efff1b 100644 --- a/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx @@ -2,6 +2,10 @@ import { renderHook, act } from '@testing-library/react'; import type { HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; +import { + mockActivityEventGroup, + mockDecisionEventGroup, +} from '../../__fixtures__/workflow-history-event-groups'; import { pendingActivityTaskStartEvent, pendingDecisionTaskStartEvent, @@ -13,401 +17,262 @@ import type { } from '../../helpers/workflow-history-grouper.types'; import useWorkflowHistoryGrouper from '../use-workflow-history-grouper'; -// Mock the HistoryEventsGrouper jest.mock('../../helpers/workflow-history-grouper'); -// Mock useThrottledState to disable throttling in tests -jest.mock('@/hooks/use-throttled-state', () => { - const { useState } = jest.requireActual('react'); - return jest.fn((initialValue) => { - const [state, setState] = useState(initialValue); - const setStateWrapper = ( - callback: (prev: any) 
=> any, - _executeImmediately?: boolean - ) => { - setState((prev: any) => callback(prev)); - }; - return [state, setStateWrapper]; - }); -}); - describe(useWorkflowHistoryGrouper.name, () => { - let mockGrouper: jest.Mocked; - let mockOnChangeCallback: (state: GroupingProcessState) => void; - - const createMockState = ( - overrides?: Partial - ): GroupingProcessState => ({ - groups: {}, - processedEventsCount: 0, - remainingEventsCount: 0, - status: 'idle', - ...overrides, - }); - - beforeEach(() => { - // Reset the mock implementation before each test - mockOnChangeCallback = jest.fn(); - - mockGrouper = { - getState: jest.fn(), - onChange: jest.fn(), - updateEvents: jest.fn(), - updatePendingEvents: jest.fn(), - destroy: jest.fn(), - } as any; - - // Mock the constructor to return our mock grouper - ( - HistoryEventsGrouper as jest.MockedClass - ).mockImplementation(() => mockGrouper); - - // Default mock implementations - mockGrouper.getState.mockReturnValue(createMockState()); - mockGrouper.onChange.mockImplementation((callback) => { - mockOnChangeCallback = callback; - return jest.fn(); // Return unsubscribe function - }); - }); - afterEach(() => { jest.clearAllMocks(); }); - describe('initialization', () => { - it('should create HistoryEventsGrouper with default batchSize', () => { - renderHook(() => useWorkflowHistoryGrouper()); + it('should create HistoryEventsGrouper with default batchSize', () => { + setup(); - expect(HistoryEventsGrouper).toHaveBeenCalledWith({ - batchSize: 300, - }); + expect(HistoryEventsGrouper).toHaveBeenCalledWith({ + batchSize: 300, }); + }); - it('should initialize with state from grouper.getState()', () => { - const mockState = createMockState({ - groups: { group1: { groupType: 'Activity' } as any }, + it('should initialize with state from grouper.getState()', () => { + const { + result: { current }, + mockGrouperInstance, + } = setup({ + initialState: { + groups: { + group1: mockActivityEventGroup, + }, processedEventsCount: 
10, - }); - mockGrouper.getState.mockReturnValue(mockState); - - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - expect(mockGrouper.getState).toHaveBeenCalled(); - expect(result.current.eventGroups).toEqual(mockState.groups); - expect(result.current.groupingState).toEqual(mockState); + }, }); - it('should subscribe to grouper onChange', () => { - renderHook(() => useWorkflowHistoryGrouper()); - - expect(mockGrouper.onChange).toHaveBeenCalledWith(expect.any(Function)); + expect(mockGrouperInstance.getState).toHaveBeenCalled(); + expect(current.eventGroups).toEqual({ + group1: mockActivityEventGroup, }); - - it('should return empty groups when groupingState is null', () => { - mockGrouper.getState.mockReturnValue(null as any); - - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - expect(result.current.eventGroups).toEqual({}); + expect(current.groupingState).toMatchObject({ + groups: { + group1: mockActivityEventGroup, + }, + processedEventsCount: 10, }); }); - describe('custom throttleMs', () => { - it('should accept custom throttle time', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper(5000)); + it('should subscribe to grouper onChange', () => { + const { mockGrouperInstance } = setup(); - expect(result.current).toBeDefined(); - }); + expect(mockGrouperInstance.onChange).toHaveBeenCalledWith( + expect.any(Function) + ); }); - describe('onChange subscription', () => { - it('should update groupingState when onChange callback is triggered', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const newState = createMockState({ - groups: { group1: { groupType: 'Decision' } as any }, - processedEventsCount: 5, - status: 'processing', - }); - - act(() => { - mockOnChangeCallback(newState); - }); + it('should update groupingState when onChange callback is triggered', () => { + const { result, getMockOnChangeCallback } = setup(); - 
expect(result.current.groupingState).toEqual(newState); - expect(result.current.eventGroups).toEqual(newState.groups); - expect(result.current.isProcessing).toBe(true); + const newState = createMockState({ + groups: { + group1: mockDecisionEventGroup, + }, + processedEventsCount: 5, + status: 'processing', }); - it('should set isProcessing to false when status is idle', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const idleState = createMockState({ - status: 'idle', - }); - - act(() => { - mockOnChangeCallback(idleState); - }); - - expect(result.current.isProcessing).toBe(false); + act(() => { + const mockOnChangeCallback = getMockOnChangeCallback(); + mockOnChangeCallback(newState); }); - it('should set isProcessing to true when status is processing', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const processingState = createMockState({ - status: 'processing', - }); + expect(result.current.groupingState).toEqual(newState); + expect(result.current.eventGroups).toEqual(newState.groups); + expect(result.current.isProcessing).toBe(true); + }); - act(() => { - mockOnChangeCallback(processingState); - }); + it('should set isProcessing to false when status is idle', () => { + const { result, getMockOnChangeCallback } = setup(); - expect(result.current.isProcessing).toBe(true); + const idleState = createMockState({ + status: 'idle', }); - it('should update state immediately when onChange is called', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const newState = createMockState({ - processedEventsCount: 50, - groups: { group1: { groupType: 'Activity' } as any }, - }); - - act(() => { - mockOnChangeCallback(newState); - }); - - expect(result.current.groupingState).toEqual(newState); + act(() => { + const mockOnChangeCallback = getMockOnChangeCallback(); + mockOnChangeCallback(idleState); }); - }); - describe('updateEvents', () => { - it('should call grouper.updateEvents 
with provided events', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const mockEvents: HistoryEvent[] = [ - { eventId: '1', eventTime: null } as HistoryEvent, - { eventId: '2', eventTime: null } as HistoryEvent, - ]; + expect(result.current.isProcessing).toBe(false); + }); - act(() => { - result.current.updateEvents(mockEvents); - }); + it('should set isProcessing to true when status is processing', () => { + const { result, getMockOnChangeCallback } = setup(); - expect(mockGrouper.updateEvents).toHaveBeenCalledWith(mockEvents); + const processingState = createMockState({ + status: 'processing', }); - it('should handle empty events array', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - act(() => { - result.current.updateEvents([]); - }); - - expect(mockGrouper.updateEvents).toHaveBeenCalledWith([]); + act(() => { + const mockOnChangeCallback = getMockOnChangeCallback(); + mockOnChangeCallback(processingState); }); - it('should not throw if grouper is not initialized', () => { - // This shouldn't happen in practice, but test defensive coding - mockGrouper.updateEvents.mockImplementation(() => { - throw new Error('Grouper not initialized'); - }); - - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - expect(() => { - act(() => { - result.current.updateEvents([]); - }); - }).toThrow(); - }); + expect(result.current.isProcessing).toBe(true); }); - describe('updatePendingEvents', () => { - it('should call grouper.updatePendingEvents with provided params', async () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); + it('should call grouper.updateEvents with provided events', () => { + const { + result: { current }, + mockGrouperInstance, + } = setup(); - const params: ProcessEventsParams = { - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: pendingDecisionTaskStartEvent, - }; + const mockEvents: HistoryEvent[] = [ + { eventId: '1', 
eventTime: null } as HistoryEvent, + { eventId: '2', eventTime: null } as HistoryEvent, + ]; - await act(async () => { - await result.current.updatePendingEvents(params); - }); - - expect(mockGrouper.updatePendingEvents).toHaveBeenCalledWith(params); + act(() => { + current.updateEvents(mockEvents); }); - it('should handle empty pending events', async () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); + expect(mockGrouperInstance.updateEvents).toHaveBeenCalledWith(mockEvents); + }); - const params: ProcessEventsParams = { - pendingStartActivities: [], - pendingStartDecision: null, - }; + it('should call grouper.updatePendingEvents with provided params', async () => { + const { + result: { current }, + mockGrouperInstance, + } = setup(); - await act(async () => { - await result.current.updatePendingEvents(params); - }); + const params: ProcessEventsParams = { + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: pendingDecisionTaskStartEvent, + }; - expect(mockGrouper.updatePendingEvents).toHaveBeenCalledWith(params); + await act(async () => { + await current.updatePendingEvents(params); }); - it('should be async and await completion', async () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - let updateCompleted = false; - mockGrouper.updatePendingEvents.mockImplementation(async () => { - updateCompleted = true; - }); - - const params: ProcessEventsParams = { - pendingStartActivities: [], - pendingStartDecision: null, - }; - - await act(async () => { - await result.current.updatePendingEvents(params); - }); - - expect(updateCompleted).toBe(true); - }); + expect(mockGrouperInstance.updatePendingEvents).toHaveBeenCalledWith( + params + ); }); - describe('cleanup', () => { - it('should unsubscribe from onChange on unmount', () => { - const mockUnsubscribe = jest.fn(); - mockGrouper.onChange.mockReturnValue(mockUnsubscribe); + it('should unsubscribe from onChange on unmount', () => { + 
const { unmount, mockUnsubscribe } = setup(); - const { unmount } = renderHook(() => useWorkflowHistoryGrouper()); + expect(mockUnsubscribe).not.toHaveBeenCalled(); - expect(mockUnsubscribe).not.toHaveBeenCalled(); + unmount(); - unmount(); + expect(mockUnsubscribe).toHaveBeenCalled(); + }); - expect(mockUnsubscribe).toHaveBeenCalled(); - }); + it('should call grouper.destroy on unmount', () => { + const { unmount, mockGrouperInstance } = setup(); - it('should call grouper.destroy on unmount', () => { - const { unmount } = renderHook(() => useWorkflowHistoryGrouper()); + expect(mockGrouperInstance.destroy).not.toHaveBeenCalled(); - expect(mockGrouper.destroy).not.toHaveBeenCalled(); + unmount(); - unmount(); + expect(mockGrouperInstance.destroy).toHaveBeenCalled(); + }); - expect(mockGrouper.destroy).toHaveBeenCalled(); + it('should handle rapid event updates', () => { + const { + result: { current }, + mockGrouperInstance, + } = setup(); + + const events1: HistoryEvent[] = [{ eventId: '1' } as HistoryEvent]; + const events2: HistoryEvent[] = [ + { eventId: '1' } as HistoryEvent, + { eventId: '2' } as HistoryEvent, + ]; + const events3: HistoryEvent[] = [ + { eventId: '1' } as HistoryEvent, + { eventId: '2' } as HistoryEvent, + { eventId: '3' } as HistoryEvent, + ]; + + act(() => { + current.updateEvents(events1); + current.updateEvents(events2); + current.updateEvents(events3); }); - it('should handle multiple unmounts safely', () => { - const { unmount } = renderHook(() => useWorkflowHistoryGrouper()); - - unmount(); - - expect(mockGrouper.destroy).toHaveBeenCalledTimes(1); - - // Second unmount should not throw - expect(() => unmount()).not.toThrow(); - }); + expect(mockGrouperInstance.updateEvents).toHaveBeenCalledTimes(3); + expect(mockGrouperInstance.updateEvents).toHaveBeenLastCalledWith(events3); }); - describe('return values', () => { - it('should return correct shape of object', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - 
- expect(result.current).toEqual({ - eventGroups: expect.any(Object), - isProcessing: expect.any(Boolean), - groupingState: expect.any(Object), - updateEvents: expect.any(Function), - updatePendingEvents: expect.any(Function), - }); - }); - - it('should maintain stable function references', () => { - const { result, rerender } = renderHook(() => - useWorkflowHistoryGrouper() - ); + it('should persist grouper instance across re-renders', () => { + const { rerender } = setup(); - const firstUpdateEvents = result.current.updateEvents; - const firstUpdatePendingEvents = result.current.updatePendingEvents; + expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); - rerender(); + rerender(); + rerender(); + rerender(); - expect(result.current.updateEvents).toBe(firstUpdateEvents); - expect(result.current.updatePendingEvents).toBe(firstUpdatePendingEvents); - }); + // Constructor should only be called once + expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); }); +}); - describe('integration scenarios', () => { - it('should handle rapid event updates', () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const events1: HistoryEvent[] = [{ eventId: '1' } as HistoryEvent]; - const events2: HistoryEvent[] = [ - { eventId: '1' } as HistoryEvent, - { eventId: '2' } as HistoryEvent, - ]; - const events3: HistoryEvent[] = [ - { eventId: '1' } as HistoryEvent, - { eventId: '2' } as HistoryEvent, - { eventId: '3' } as HistoryEvent, - ]; - - act(() => { - result.current.updateEvents(events1); - result.current.updateEvents(events2); - result.current.updateEvents(events3); - }); - - expect(mockGrouper.updateEvents).toHaveBeenCalledTimes(3); - expect(mockGrouper.updateEvents).toHaveBeenLastCalledWith(events3); - }); - - it('should handle combined updates and state changes', async () => { - const { result } = renderHook(() => useWorkflowHistoryGrouper()); - - const mockEvents: HistoryEvent[] = [{ eventId: '1' } as HistoryEvent]; - const params: 
ProcessEventsParams = { - pendingStartActivities: [pendingActivityTaskStartEvent], - pendingStartDecision: null, - }; - - act(() => { - result.current.updateEvents(mockEvents); - }); +const createMockState = ( + overrides?: Partial +): GroupingProcessState => ({ + groups: {}, + processedEventsCount: 0, + remainingEventsCount: 0, + status: 'idle', + ...overrides, +}); - await act(async () => { - await result.current.updatePendingEvents(params); - }); +function setup(options?: { + initialState?: Partial; + throttleMs?: number; +}) { + let mockOnChangeCallback: (state: GroupingProcessState) => void; + const mockUnsubscribe = jest.fn(); - const newState = createMockState({ - groups: { group1: { groupType: 'Activity' } as any }, - processedEventsCount: 1, - }); + const initialState = createMockState(options?.initialState); - act(() => { - mockOnChangeCallback(newState); - }); + // Spy on the prototype methods to create type-safe mocks + const getStateSpy = jest + .spyOn(HistoryEventsGrouper.prototype, 'getState') + .mockReturnValue(initialState); - expect(result.current.eventGroups).toEqual(newState.groups); - expect(mockGrouper.updateEvents).toHaveBeenCalledWith(mockEvents); - expect(mockGrouper.updatePendingEvents).toHaveBeenCalledWith(params); + const onChangeSpy = jest + .spyOn(HistoryEventsGrouper.prototype, 'onChange') + .mockImplementation((callback) => { + mockOnChangeCallback = callback; + return mockUnsubscribe; }); - it('should persist grouper instance across re-renders', () => { - const { rerender } = renderHook(() => useWorkflowHistoryGrouper()); - - expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); - - rerender(); - rerender(); - rerender(); - - // Constructor should only be called once - expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); - }); - }); -}); + const updateEventsSpy = jest.spyOn( + HistoryEventsGrouper.prototype, + 'updateEvents' + ); + + const updatePendingEventsSpy = jest.spyOn( + HistoryEventsGrouper.prototype, + 
'updatePendingEvents' + ); + + const destroySpy = jest.spyOn(HistoryEventsGrouper.prototype, 'destroy'); + + // Render the hook (constructor will create instance with spied methods) + const hookResult = renderHook(() => + useWorkflowHistoryGrouper(options?.throttleMs ?? 0) + ); + + return { + ...hookResult, + mockGrouperInstance: { + getState: getStateSpy, + onChange: onChangeSpy, + updateEvents: updateEventsSpy, + updatePendingEvents: updatePendingEventsSpy, + destroy: destroySpy, + }, + getMockOnChangeCallback: () => mockOnChangeCallback, + mockUnsubscribe, + }; +} diff --git a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts index f73105db1..033fad447 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts @@ -60,15 +60,12 @@ export default function useWorkflowHistoryGrouper(throttleMs = 2000) { }, []); // Expose updatePendingEvents method - const updatePendingEvents = useCallback( - async (params: ProcessEventsParams) => { - if (!grouperRef.current) { - return; - } - grouperRef.current.updatePendingEvents(params); - }, - [] - ); + const updatePendingEvents = useCallback((params: ProcessEventsParams) => { + if (!grouperRef.current) { + return; + } + grouperRef.current.updatePendingEvents(params); + }, []); return { eventGroups: groupingState?.groups ?? 
{}, From af8b0ee54761e07f7b4d855a5c1c41f7983d0958 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 01:09:37 +0000 Subject: [PATCH 45/58] hook for history grouper --- .../use-workflow-history-grouper.test.tsx | 278 ++++++++++++++++++ .../hooks/use-workflow-history-grouper.ts | 73 +++++ 2 files changed, 351 insertions(+) create mode 100644 src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx create mode 100644 src/views/workflow-history/hooks/use-workflow-history-grouper.ts diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx new file mode 100644 index 000000000..a10efff1b --- /dev/null +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-grouper.test.tsx @@ -0,0 +1,278 @@ +import { renderHook, act } from '@testing-library/react'; + +import type { HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; + +import { + mockActivityEventGroup, + mockDecisionEventGroup, +} from '../../__fixtures__/workflow-history-event-groups'; +import { + pendingActivityTaskStartEvent, + pendingDecisionTaskStartEvent, +} from '../../__fixtures__/workflow-history-pending-events'; +import HistoryEventsGrouper from '../../helpers/workflow-history-grouper'; +import type { + GroupingProcessState, + ProcessEventsParams, +} from '../../helpers/workflow-history-grouper.types'; +import useWorkflowHistoryGrouper from '../use-workflow-history-grouper'; + +jest.mock('../../helpers/workflow-history-grouper'); + +describe(useWorkflowHistoryGrouper.name, () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should create HistoryEventsGrouper with default batchSize', () => { + setup(); + + expect(HistoryEventsGrouper).toHaveBeenCalledWith({ + batchSize: 300, + }); + }); + + it('should initialize with state from grouper.getState()', () => { + const { + result: { current }, + 
mockGrouperInstance, + } = setup({ + initialState: { + groups: { + group1: mockActivityEventGroup, + }, + processedEventsCount: 10, + }, + }); + + expect(mockGrouperInstance.getState).toHaveBeenCalled(); + expect(current.eventGroups).toEqual({ + group1: mockActivityEventGroup, + }); + expect(current.groupingState).toMatchObject({ + groups: { + group1: mockActivityEventGroup, + }, + processedEventsCount: 10, + }); + }); + + it('should subscribe to grouper onChange', () => { + const { mockGrouperInstance } = setup(); + + expect(mockGrouperInstance.onChange).toHaveBeenCalledWith( + expect.any(Function) + ); + }); + + it('should update groupingState when onChange callback is triggered', () => { + const { result, getMockOnChangeCallback } = setup(); + + const newState = createMockState({ + groups: { + group1: mockDecisionEventGroup, + }, + processedEventsCount: 5, + status: 'processing', + }); + + act(() => { + const mockOnChangeCallback = getMockOnChangeCallback(); + mockOnChangeCallback(newState); + }); + + expect(result.current.groupingState).toEqual(newState); + expect(result.current.eventGroups).toEqual(newState.groups); + expect(result.current.isProcessing).toBe(true); + }); + + it('should set isProcessing to false when status is idle', () => { + const { result, getMockOnChangeCallback } = setup(); + + const idleState = createMockState({ + status: 'idle', + }); + + act(() => { + const mockOnChangeCallback = getMockOnChangeCallback(); + mockOnChangeCallback(idleState); + }); + + expect(result.current.isProcessing).toBe(false); + }); + + it('should set isProcessing to true when status is processing', () => { + const { result, getMockOnChangeCallback } = setup(); + + const processingState = createMockState({ + status: 'processing', + }); + + act(() => { + const mockOnChangeCallback = getMockOnChangeCallback(); + mockOnChangeCallback(processingState); + }); + + expect(result.current.isProcessing).toBe(true); + }); + + it('should call grouper.updateEvents with provided 
events', () => { + const { + result: { current }, + mockGrouperInstance, + } = setup(); + + const mockEvents: HistoryEvent[] = [ + { eventId: '1', eventTime: null } as HistoryEvent, + { eventId: '2', eventTime: null } as HistoryEvent, + ]; + + act(() => { + current.updateEvents(mockEvents); + }); + + expect(mockGrouperInstance.updateEvents).toHaveBeenCalledWith(mockEvents); + }); + + it('should call grouper.updatePendingEvents with provided params', async () => { + const { + result: { current }, + mockGrouperInstance, + } = setup(); + + const params: ProcessEventsParams = { + pendingStartActivities: [pendingActivityTaskStartEvent], + pendingStartDecision: pendingDecisionTaskStartEvent, + }; + + await act(async () => { + await current.updatePendingEvents(params); + }); + + expect(mockGrouperInstance.updatePendingEvents).toHaveBeenCalledWith( + params + ); + }); + + it('should unsubscribe from onChange on unmount', () => { + const { unmount, mockUnsubscribe } = setup(); + + expect(mockUnsubscribe).not.toHaveBeenCalled(); + + unmount(); + + expect(mockUnsubscribe).toHaveBeenCalled(); + }); + + it('should call grouper.destroy on unmount', () => { + const { unmount, mockGrouperInstance } = setup(); + + expect(mockGrouperInstance.destroy).not.toHaveBeenCalled(); + + unmount(); + + expect(mockGrouperInstance.destroy).toHaveBeenCalled(); + }); + + it('should handle rapid event updates', () => { + const { + result: { current }, + mockGrouperInstance, + } = setup(); + + const events1: HistoryEvent[] = [{ eventId: '1' } as HistoryEvent]; + const events2: HistoryEvent[] = [ + { eventId: '1' } as HistoryEvent, + { eventId: '2' } as HistoryEvent, + ]; + const events3: HistoryEvent[] = [ + { eventId: '1' } as HistoryEvent, + { eventId: '2' } as HistoryEvent, + { eventId: '3' } as HistoryEvent, + ]; + + act(() => { + current.updateEvents(events1); + current.updateEvents(events2); + current.updateEvents(events3); + }); + + 
expect(mockGrouperInstance.updateEvents).toHaveBeenCalledTimes(3); + expect(mockGrouperInstance.updateEvents).toHaveBeenLastCalledWith(events3); + }); + + it('should persist grouper instance across re-renders', () => { + const { rerender } = setup(); + + expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); + + rerender(); + rerender(); + rerender(); + + // Constructor should only be called once + expect(HistoryEventsGrouper).toHaveBeenCalledTimes(1); + }); +}); + +const createMockState = ( + overrides?: Partial +): GroupingProcessState => ({ + groups: {}, + processedEventsCount: 0, + remainingEventsCount: 0, + status: 'idle', + ...overrides, +}); + +function setup(options?: { + initialState?: Partial; + throttleMs?: number; +}) { + let mockOnChangeCallback: (state: GroupingProcessState) => void; + const mockUnsubscribe = jest.fn(); + + const initialState = createMockState(options?.initialState); + + // Spy on the prototype methods to create type-safe mocks + const getStateSpy = jest + .spyOn(HistoryEventsGrouper.prototype, 'getState') + .mockReturnValue(initialState); + + const onChangeSpy = jest + .spyOn(HistoryEventsGrouper.prototype, 'onChange') + .mockImplementation((callback) => { + mockOnChangeCallback = callback; + return mockUnsubscribe; + }); + + const updateEventsSpy = jest.spyOn( + HistoryEventsGrouper.prototype, + 'updateEvents' + ); + + const updatePendingEventsSpy = jest.spyOn( + HistoryEventsGrouper.prototype, + 'updatePendingEvents' + ); + + const destroySpy = jest.spyOn(HistoryEventsGrouper.prototype, 'destroy'); + + // Render the hook (constructor will create instance with spied methods) + const hookResult = renderHook(() => + useWorkflowHistoryGrouper(options?.throttleMs ?? 
0) + ); + + return { + ...hookResult, + mockGrouperInstance: { + getState: getStateSpy, + onChange: onChangeSpy, + updateEvents: updateEventsSpy, + updatePendingEvents: updatePendingEventsSpy, + destroy: destroySpy, + }, + getMockOnChangeCallback: () => mockOnChangeCallback, + mockUnsubscribe, + }; +} diff --git a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts new file mode 100644 index 000000000..e170b1a9d --- /dev/null +++ b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts @@ -0,0 +1,73 @@ +import { useCallback, useEffect, useRef } from 'react'; + +import type { HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; +import useThrottledState from '@/hooks/use-throttled-state'; + +import HistoryEventsGrouper from '../helpers/workflow-history-grouper'; +import type { + GroupingProcessState, + ProcessEventsParams, +} from '../helpers/workflow-history-grouper.types'; + +/** + * Hook for grouping workflow history events using the HistoryEventsGrouper. 
+ */ +export default function useWorkflowHistoryGrouper(throttleMs = 2000) { + const grouperRef = useRef(null); + + if (!grouperRef.current) { + grouperRef.current = new HistoryEventsGrouper({ + batchSize: 300, + }); + } + + const [groupingState, setGroupingState] = + useThrottledState( + grouperRef.current.getState(), + throttleMs, + { + leading: true, + trailing: true, + } + ); + + useEffect(() => { + if (!grouperRef.current) return; + + const unsubscribe = grouperRef.current.onChange((state) => { + const setImmediate = state.processedEventsCount < 300; + setGroupingState(() => state, setImmediate); + }); + + return () => unsubscribe(); + }, [setGroupingState]); + + useEffect(() => { + return () => { + grouperRef.current?.destroy(); + }; + }, []); + + const updateEvents = useCallback((newEvents: HistoryEvent[]) => { + if (!grouperRef.current) { + return; + } + + grouperRef.current.updateEvents(newEvents); + }, []); + + const updatePendingEvents = useCallback((params: ProcessEventsParams) => { + if (!grouperRef.current) { + return; + } + grouperRef.current.updatePendingEvents(params); + }, []); + + return { + eventGroups: groupingState?.groups ?? {}, + isProcessing: groupingState?.status === 'processing', + groupingState, + updateEvents, + updatePendingEvents, + }; +} From 854b81fe63663900e342e463bd936f1b5087857e Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 01:10:27 +0000 Subject: [PATCH 46/58] remove comments --- .../workflow-history/hooks/use-workflow-history-grouper.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts index 033fad447..e170b1a9d 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-grouper.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-grouper.ts @@ -13,7 +13,6 @@ import type { * Hook for grouping workflow history events using the HistoryEventsGrouper. 
*/ export default function useWorkflowHistoryGrouper(throttleMs = 2000) { - // Initialize the grouper once and persist across renders const grouperRef = useRef(null); if (!grouperRef.current) { @@ -22,7 +21,6 @@ export default function useWorkflowHistoryGrouper(throttleMs = 2000) { }); } - // Track grouping state - updated internally during processing const [groupingState, setGroupingState] = useThrottledState( grouperRef.current.getState(), @@ -50,7 +48,6 @@ export default function useWorkflowHistoryGrouper(throttleMs = 2000) { }; }, []); - // Expose updateEvents method (usually called automatically by effect) const updateEvents = useCallback((newEvents: HistoryEvent[]) => { if (!grouperRef.current) { return; @@ -59,7 +56,6 @@ export default function useWorkflowHistoryGrouper(throttleMs = 2000) { grouperRef.current.updateEvents(newEvents); }, []); - // Expose updatePendingEvents method const updatePendingEvents = useCallback((params: ProcessEventsParams) => { if (!grouperRef.current) { return; From cc38cb121f2c061abfae9ff18b64efb5bfb0776b Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 01:34:56 +0000 Subject: [PATCH 47/58] fetcher updates --- .../workflow-history-v2.tsx | 2 +- .../workflow-history-page-size.config.ts | 5 +- .../workflow-history-fetcher.test.tsx | 49 +++++++++++++++++-- .../helpers/workflow-history-fetcher.ts | 9 +++- .../use-workflow-history-fetcher.test.tsx | 6 ++- .../hooks/use-workflow-history-fetcher.ts | 15 ++++-- .../workflow-history/workflow-history.tsx | 2 +- 7 files changed, 74 insertions(+), 14 deletions(-) diff --git a/src/views/workflow-history-v2/workflow-history-v2.tsx b/src/views/workflow-history-v2/workflow-history-v2.tsx index 6f33a88db..fcdd0358b 100644 --- a/src/views/workflow-history-v2/workflow-history-v2.tsx +++ b/src/views/workflow-history-v2/workflow-history-v2.tsx @@ -4,7 +4,7 @@ import usePageFilters from '@/components/page-filters/hooks/use-page-filters'; import decodeUrlParams from 
'@/utils/decode-url-params'; import workflowHistoryFiltersConfig from '../workflow-history/config/workflow-history-filters.config'; -import WORKFLOW_HISTORY_PAGE_SIZE_CONFIG from '../workflow-history/config/workflow-history-page-size.config'; +import { WORKFLOW_HISTORY_PAGE_SIZE_CONFIG } from '../workflow-history/config/workflow-history-page-size.config'; import { WorkflowHistoryContext } from '../workflow-history/workflow-history-context-provider/workflow-history-context-provider'; import workflowPageQueryParamsConfig from '../workflow-page/config/workflow-page-query-params.config'; import { type WorkflowPageTabContentParams } from '../workflow-page/workflow-page-tab-content/workflow-page-tab-content.types'; diff --git a/src/views/workflow-history/config/workflow-history-page-size.config.ts b/src/views/workflow-history/config/workflow-history-page-size.config.ts index 933cf92dd..453efac10 100644 --- a/src/views/workflow-history/config/workflow-history-page-size.config.ts +++ b/src/views/workflow-history/config/workflow-history-page-size.config.ts @@ -1,3 +1,2 @@ -const WORKFLOW_HISTORY_PAGE_SIZE_CONFIG = 200; - -export default WORKFLOW_HISTORY_PAGE_SIZE_CONFIG; +export const WORKFLOW_HISTORY_PAGE_SIZE_CONFIG = 1000; +export const WORKFLOW_HISTORY_FIRST_PAGE_SIZE_CONFIG = 200; diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx index f2d59e315..23ab86ed2 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx @@ -262,6 +262,37 @@ describe(WorkflowHistoryFetcher.name, () => { const state = fetcher.getCurrentState(); expect(state.data?.pages.length).toBe(pageCountBefore); }); + + it('should use WORKFLOW_HISTORY_FIRST_PAGE_SIZE_CONFIG for the first page', async () => { + const { fetcher, getCapturedPageSizes } = 
setup(queryClient); + + fetcher.start((state) => !state?.data?.pages?.length); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(1); + }); + + const pageSizes = getCapturedPageSizes(); + expect(pageSizes).toHaveLength(1); + expect(pageSizes[0]).toBe('200'); + }); + + it('should use WORKFLOW_HISTORY_PAGE_SIZE_CONFIG for subsequent pages', async () => { + const { fetcher, getCapturedPageSizes } = setup(queryClient); + + fetcher.start((state) => (state.data?.pages.length || 0) < 2); + + await waitFor(() => { + const state = fetcher.getCurrentState(); + expect(state.data?.pages).toHaveLength(2); + }); + + const pageSizes = getCapturedPageSizes(); + expect(pageSizes.length).toBeGreaterThanOrEqual(2); + expect(pageSizes[0]).toBe('200'); + expect(pageSizes[1]).toBe('1000'); + }); }); function setup(client: QueryClient, options: { failOnPages?: number[] } = {}) { @@ -273,7 +304,10 @@ function setup(client: QueryClient, options: { failOnPages?: number[] } = {}) { pageSize: 10, }; - mockHistoryEndpoint(workflowHistoryMultiPageFixture, options.failOnPages); + const { getCapturedPageSizes } = mockHistoryEndpoint( + workflowHistoryMultiPageFixture, + options.failOnPages + ); const fetcher = new WorkflowHistoryFetcher(client, params); hoistedFetcher = fetcher; @@ -292,6 +326,7 @@ function setup(client: QueryClient, options: { failOnPages?: number[] } = {}) { fetcher, params, waitForData, + getCapturedPageSizes, }; } @@ -299,6 +334,8 @@ function mockHistoryEndpoint( responses: GetWorkflowHistoryResponse[], failOnPages: number[] = [] ) { + const capturedPageSizes: string[] = []; + mswMockEndpoints([ { path: '/api/domains/:domain/:cluster/workflows/:workflowId/:runId/history', @@ -307,6 +344,9 @@ function mockHistoryEndpoint( httpResolver: async ({ request }) => { const url = new URL(request.url); const nextPage = url.searchParams.get('nextPage'); + const pageSize = url.searchParams.get('pageSize'); + + 
capturedPageSizes.push(pageSize ?? ''); // Determine current page number based on nextPage param let pageNumber = 1; @@ -328,10 +368,13 @@ function mockHistoryEndpoint( // Map page number to response index (0-indexed) const responseIndex = pageNumber - 1; - const response = - responses[responseIndex] || responses[responses.length - 1]; + const response = responses[responseIndex]; return HttpResponse.json(response); }, }, ]); + + return { + getCapturedPageSizes: () => capturedPageSizes, + }; } diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index a43dfaa31..718b21093 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -7,6 +7,11 @@ import { } from '@/route-handlers/get-workflow-history/get-workflow-history.types'; import request from '@/utils/request'; +import { + WORKFLOW_HISTORY_FIRST_PAGE_SIZE_CONFIG, + WORKFLOW_HISTORY_PAGE_SIZE_CONFIG, +} from '../config/workflow-history-page-size.config'; + import { type WorkflowHistoryQueryResult, type QueryResultOnChangeCallback, @@ -126,7 +131,9 @@ export default class WorkflowHistoryFetcher { url: `/api/domains/${params.domain}/${params.cluster}/workflows/${params.workflowId}/${params.runId}/history`, query: { nextPage: pageParam, - pageSize: params.pageSize, + pageSize: pageParam + ? WORKFLOW_HISTORY_PAGE_SIZE_CONFIG + : WORKFLOW_HISTORY_FIRST_PAGE_SIZE_CONFIG, waitForNewEvent: params.waitForNewEvent ?? 
false, } satisfies WorkflowHistoryQueryParams, }) diff --git a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx index 8f67dcbb7..0cb9e7332 100644 --- a/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/hooks/__tests__/use-workflow-history-fetcher.test.tsx @@ -19,13 +19,17 @@ let mockOnChangeCallback: jest.Mock; let mockUnsubscribe: jest.Mock; function setup() { - const hookResult = renderHook(() => useWorkflowHistoryFetcher(mockParams)); + const mockOnEventsChange = jest.fn(); + const hookResult = renderHook(() => + useWorkflowHistoryFetcher(mockParams, mockOnEventsChange) + ); return { ...hookResult, mockFetcherInstance, mockOnChangeCallback, mockUnsubscribe, + mockOnEventsChange, }; } diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index c287edfd3..3609e3bbf 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -6,6 +6,7 @@ import { useQueryClient, } from '@tanstack/react-query'; +import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; import useThrottledState from '@/hooks/use-throttled-state'; import { type WorkflowHistoryQueryParams, @@ -19,6 +20,7 @@ import { type ShouldContinueCallback } from '../helpers/workflow-history-fetcher export default function useWorkflowHistoryFetcher( params: WorkflowHistoryQueryParams & RouteParams, + onEventsChange: (events: HistoryEvent[]) => void, throttleMs: number = 2000 ) { const queryClient = useQueryClient(); @@ -26,6 +28,9 @@ export default function useWorkflowHistoryFetcher( if (!fetcherRef.current) { fetcherRef.current = new WorkflowHistoryFetcher(queryClient, params); + + // Fetch first page + 
fetcherRef.current.start((state) => !state?.data?.pages?.length); } const [historyQuery, setHistoryQuery] = useThrottledState< @@ -43,20 +48,22 @@ export default function useWorkflowHistoryFetcher( const unsubscribe = fetcherRef.current.onChange((state) => { const pagesCount = state.data?.pages?.length || 0; + onEventsChange( + state.data?.pages?.flatMap((page) => page.history?.events || []) || [] + ); // immediately set if there is the first page without throttling other wise throttle const executeImmediately = pagesCount <= 1; setHistoryQuery(() => state, executeImmediately); }); - // Fetch first page - fetcherRef.current.start((state) => !state?.data?.pages?.length); - return () => { unsubscribe(); }; - }, [setHistoryQuery]); + }, [setHistoryQuery, onEventsChange]); useEffect(() => { + if (!fetcherRef.current) return; + return () => { fetcherRef.current?.destroy(); }; diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index ef69c835b..7b8b5a81a 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -25,7 +25,7 @@ import workflowPageQueryParamsConfig from '../workflow-page/config/workflow-page import { useSuspenseDescribeWorkflow } from '../workflow-page/hooks/use-describe-workflow'; import workflowHistoryFiltersConfig from './config/workflow-history-filters.config'; -import WORKFLOW_HISTORY_PAGE_SIZE_CONFIG from './config/workflow-history-page-size.config'; +import { WORKFLOW_HISTORY_PAGE_SIZE_CONFIG } from './config/workflow-history-page-size.config'; import compareUngroupedEvents from './helpers/compare-ungrouped-events'; import getSortableEventId from './helpers/get-sortable-event-id'; import getVisibleGroupsHasMissingEvents from './helpers/get-visible-groups-has-missing-events'; From 2a41fc674e3599cd78dd4dec9eccdc2a764a76f6 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 10:03:17 +0000 Subject: [PATCH 48/58] add 
placeholder for fetcher --- src/views/workflow-history/workflow-history.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 7b8b5a81a..867711826 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -73,6 +73,8 @@ export default function WorkflowHistory({ params }: Props) { pageSize: wfHistoryRequestArgs.pageSize, waitForNewEvent: wfHistoryRequestArgs.waitForNewEvent, }, + //TODO replace this with grouper callback + () => {}, 2000 ); From 1f54d952d70deb519d8364c6cce7b6ba900fd858 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 10:04:43 +0000 Subject: [PATCH 49/58] update fetcher mock --- .../workflow-history/__tests__/workflow-history.test.tsx | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/views/workflow-history/__tests__/workflow-history.test.tsx b/src/views/workflow-history/__tests__/workflow-history.test.tsx index 797b1fde6..3a7a63400 100644 --- a/src/views/workflow-history/__tests__/workflow-history.test.tsx +++ b/src/views/workflow-history/__tests__/workflow-history.test.tsx @@ -29,11 +29,14 @@ jest.mock('@/hooks/use-page-query-params/use-page-query-params', () => ); // Mock the hook to use minimal throttle delay for faster tests +// Mock the hooks to use minimal throttle delay for faster tests jest.mock('../hooks/use-workflow-history-fetcher', () => { const actual = jest.requireActual('../hooks/use-workflow-history-fetcher'); return { __esModule: true, - default: jest.fn((params) => actual.default(params, 0)), // 0ms throttle for tests + default: jest.fn((params, onEventsChange) => + actual.default(params, onEventsChange, 0) + ), // 0ms throttle for tests }; }); From 3398d8ae83520ae861f707f5f23ab12e14c9e6bc Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 10:47:07 +0000 Subject: [PATCH 50/58] fetcher start update --- 
.../helpers/workflow-history-fetcher.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/views/workflow-history/helpers/workflow-history-fetcher.ts b/src/views/workflow-history/helpers/workflow-history-fetcher.ts index 718b21093..9141698b9 100644 --- a/src/views/workflow-history/helpers/workflow-history-fetcher.ts +++ b/src/views/workflow-history/helpers/workflow-history-fetcher.ts @@ -49,13 +49,17 @@ export default class WorkflowHistoryFetcher { if (shouldContinue) { this.shouldContinue = shouldContinue; } - // If already started, return - if (this.isStarted) return; + + // remove current listener (if exists) to have fresh emits only + this.unsubscribe?.(); + this.unsubscribe = null; + this.isStarted = true; let emitCount = 0; const currentState = this.observer.getCurrentResult(); const fetchedFirstPage = currentState.status !== 'pending'; - const shouldEnableQuery = !fetchedFirstPage && shouldContinue(currentState); + const shouldEnableQuery = + !fetchedFirstPage && this.shouldContinue(currentState); if (shouldEnableQuery) { this.observer.setOptions({ @@ -86,8 +90,6 @@ export default class WorkflowHistoryFetcher { emit(currentState); } - // remove current listener (if exists) and add new one - this.unsubscribe?.(); this.unsubscribe = this.observer.subscribe((res) => emit(res)); } From fb1f87ca1e5f08a1197d3a042c5fb4ea2b17592a Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 10:47:43 +0000 Subject: [PATCH 51/58] update todo --- src/views/workflow-history/workflow-history.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 867711826..571ed112c 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -73,7 +73,7 @@ export default function WorkflowHistory({ params }: Props) { pageSize: wfHistoryRequestArgs.pageSize, waitForNewEvent: 
wfHistoryRequestArgs.waitForNewEvent, }, - //TODO replace this with grouper callback + //TODO: @assem.hafez replace this with grouper callback () => {}, 2000 ); From 650a918bb252317f8ae963fc3f27e88a166621d2 Mon Sep 17 00:00:00 2001 From: Assem Hafez <137278762+Assem-Uber@users.noreply.github.com> Date: Thu, 20 Nov 2025 13:45:48 +0100 Subject: [PATCH 52/58] Apply suggestion from @Copilot Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- .../helpers/__tests__/workflow-history-fetcher.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx index 23ab86ed2..0858d0caf 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx @@ -368,7 +368,7 @@ function mockHistoryEndpoint( // Map page number to response index (0-indexed) const responseIndex = pageNumber - 1; - const response = responses[responseIndex]; + const response = responses[responseIndex] || responses[responses.length - 1]; return HttpResponse.json(response); }, }, From a9e94696aa2a7d7ad47ad906b1d11cd376e6648b Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 14:25:43 +0100 Subject: [PATCH 53/58] fix copilot comments --- .../hooks/use-workflow-history-fetcher.ts | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index 3609e3bbf..ae36a866c 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -45,12 +45,16 @@ export default function useWorkflowHistoryFetcher( useEffect(() => { if (!fetcherRef.current) return; - + let 
lastFlattenedPagesCount: number = 0; const unsubscribe = fetcherRef.current.onChange((state) => { const pagesCount = state.data?.pages?.length || 0; - onEventsChange( - state.data?.pages?.flatMap((page) => page.history?.events || []) || [] - ); + // if the pages count is greater than the last flattened pages count, then we need to flatten the pages and call the onEventsChange callback + if (pagesCount > lastFlattenedPagesCount) { + lastFlattenedPagesCount = pagesCount; + onEventsChange( + state.data?.pages?.flatMap((page) => page.history?.events || []) || [] + ); + } // immediately set if there is the first page without throttling other wise throttle const executeImmediately = pagesCount <= 1; setHistoryQuery(() => state, executeImmediately); @@ -62,8 +66,6 @@ export default function useWorkflowHistoryFetcher( }, [setHistoryQuery, onEventsChange]); useEffect(() => { - if (!fetcherRef.current) return; - return () => { fetcherRef.current?.destroy(); }; From 8dcf01c6b145d717775b5144e183bc31b1c9adb6 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 14:27:18 +0100 Subject: [PATCH 54/58] remove extra comment --- src/views/workflow-history/__tests__/workflow-history.test.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/src/views/workflow-history/__tests__/workflow-history.test.tsx b/src/views/workflow-history/__tests__/workflow-history.test.tsx index 3a7a63400..8109f4aad 100644 --- a/src/views/workflow-history/__tests__/workflow-history.test.tsx +++ b/src/views/workflow-history/__tests__/workflow-history.test.tsx @@ -29,7 +29,6 @@ jest.mock('@/hooks/use-page-query-params/use-page-query-params', () => ); // Mock the hook to use minimal throttle delay for faster tests -// Mock the hooks to use minimal throttle delay for faster tests jest.mock('../hooks/use-workflow-history-fetcher', () => { const actual = jest.requireActual('../hooks/use-workflow-history-fetcher'); return { From 573c1951ed218691e642b7bb04c34c2d8791df05 Mon Sep 17 00:00:00 2001 From: 
Assem Hafez Date: Thu, 20 Nov 2025 13:30:22 +0000 Subject: [PATCH 55/58] lint fix --- .../helpers/__tests__/workflow-history-fetcher.test.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx index 0858d0caf..7c0f89c91 100644 --- a/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx +++ b/src/views/workflow-history/helpers/__tests__/workflow-history-fetcher.test.tsx @@ -368,7 +368,8 @@ function mockHistoryEndpoint( // Map page number to response index (0-indexed) const responseIndex = pageNumber - 1; - const response = responses[responseIndex] || responses[responses.length - 1]; + const response = + responses[responseIndex] || responses[responses.length - 1]; return HttpResponse.json(response); }, }, From 730666333bf93d31583f8897d674e738e3602199 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Thu, 20 Nov 2025 13:33:41 +0000 Subject: [PATCH 56/58] change lastFlattened initial value to -1 --- .../workflow-history/hooks/use-workflow-history-fetcher.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts index ae36a866c..6ced9b164 100644 --- a/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts +++ b/src/views/workflow-history/hooks/use-workflow-history-fetcher.ts @@ -45,7 +45,7 @@ export default function useWorkflowHistoryFetcher( useEffect(() => { if (!fetcherRef.current) return; - let lastFlattenedPagesCount: number = 0; + let lastFlattenedPagesCount: number = -1; const unsubscribe = fetcherRef.current.onChange((state) => { const pagesCount = state.data?.pages?.length || 0; // if the pages count is greater than the last flattened pages count, then we need to flatten the pages and call the onEventsChange
callback From a6477fe2b67123b1fc0e60709f2b4f76a9a4b0af Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Fri, 21 Nov 2025 12:45:10 +0000 Subject: [PATCH 57/58] remove unused import --- .../workflow-history/helpers/workflow-history-grouper.types.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts index 782d5b473..5d384e626 100644 --- a/src/views/workflow-history/helpers/workflow-history-grouper.types.ts +++ b/src/views/workflow-history/helpers/workflow-history-grouper.types.ts @@ -1,5 +1,3 @@ -import { type HistoryEvent } from '@/__generated__/proto-ts/uber/cadence/api/v1/HistoryEvent'; - import type { HistoryEventsGroups, PendingActivityTaskStartEvent, From 059577ac6d20a10e7ee0d22061daea5c2d6383b6 Mon Sep 17 00:00:00 2001 From: Assem Hafez Date: Mon, 24 Nov 2025 10:20:22 +0100 Subject: [PATCH 58/58] address comments --- .../hooks/__tests__/use-initial-selected-event.test.ts | 2 +- src/views/workflow-history/workflow-history.tsx | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts b/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts index 1d89df8d7..7eaf13b5d 100644 --- a/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts +++ b/src/views/workflow-history/hooks/__tests__/use-initial-selected-event.test.ts @@ -76,7 +76,7 @@ describe('useInitialSelectedEvent', () => { expect(result.current.initialEventFound).toBe(true); }); - it('should return initialEventGroupIndex as undefined when selectedEventId is defined & group is not found in filtered entries', () => { + it('should return initialEventGroupIndex as undefined when selectedEventId is defined & event is not found in filtered entries', () => { // Group '2' exists in mockEventGroups but is filtered out from the visible list const 
filteredEventGroupsEntries: [string, HistoryEventsGroup][] = [ ['1', mockEventGroups['1']], diff --git a/src/views/workflow-history/workflow-history.tsx b/src/views/workflow-history/workflow-history.tsx index 6ed7c6024..b66076d99 100644 --- a/src/views/workflow-history/workflow-history.tsx +++ b/src/views/workflow-history/workflow-history.tsx @@ -79,9 +79,7 @@ export default function WorkflowHistory({ params }: Props) { pageSize: wfHistoryRequestArgs.pageSize, waitForNewEvent: wfHistoryRequestArgs.waitForNewEvent, }, - (events) => { - updateGrouperEvents(events); - }, + updateGrouperEvents, 2000 );