
Commit 79ad92c

Do not load too many events (#1484)
## Changes

While preloading pipeline updates in order to generate datasets and their schemas, the loading can stall for a long time (for example, for continuous pipelines that emit a large number of events). Cap the number of events read from the event listing at 1000 so preloading always finishes.
1 parent 8a19a41 commit 79ad92c

1 file changed (+7, -2 lines)

packages/databricks-vscode/src/bundle/BundlePipelinesManager.ts

Lines changed: 7 additions & 2 deletions
@@ -67,6 +67,8 @@ type SourceLocation = {
     notebook_cell_number?: number;
 };
 
+const MAX_EVENTS_TO_LOAD = 1000;
+
 export class BundlePipelinesManager {
     private disposables: Disposable[] = [];
     private readonly triggeredState: Map<string, PipelineState> = new Map();
@@ -303,11 +305,16 @@ export class BundlePipelinesManager {
             pipelineId,
             runs
         );
+        let loadedEventsCount = 0;
         for await (const event of listing) {
             const runState = runs.get(event.origin?.update_id ?? "");
             if (runState?.events) {
                 runState.events.push(event);
             }
+            loadedEventsCount++;
+            if (loadedEventsCount >= MAX_EVENTS_TO_LOAD) {
+                break;
+            }
         }
         const extractedData = extractPipelineDatasets(
             new Set(runs.values())
@@ -358,8 +365,6 @@
         if (oldestUpdateTime) {
             const timestamp = new Date(oldestUpdateTime).toISOString();
             listEventsOptions.filter = `timestamp >= '${timestamp}'`;
-        } else {
-            listEventsOptions.max_results = 100;
         }
         return client.pipelines.listPipelineEvents(listEventsOptions);
     }
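
For context, here is a minimal standalone sketch of the capping pattern this change applies. `PipelineEvent` and `fakeEventListing` are hypothetical stand-ins for the SDK's paginated event listing (the real code iterates the result of `client.pipelines.listPipelineEvents`), not the extension's actual types.

```typescript
// Sketch only: cap how many events are consumed from an async listing so a
// continuous pipeline cannot stall preloading indefinitely.
const MAX_EVENTS_TO_LOAD = 1000;

interface PipelineEvent {
    origin?: {update_id?: string};
    message?: string;
}

// Hypothetical stand-in for a paginated event listing that may never end
// (e.g. a continuous pipeline that keeps emitting events).
async function* fakeEventListing(): AsyncGenerator<PipelineEvent> {
    for (let i = 0; ; i++) {
        yield {origin: {update_id: `update-${i % 3}`}, message: `event ${i}`};
    }
}

async function loadEvents(): Promise<PipelineEvent[]> {
    const events: PipelineEvent[] = [];
    let loadedEventsCount = 0;
    for await (const event of fakeEventListing()) {
        events.push(event);
        loadedEventsCount++;
        // Stop once the cap is reached instead of draining the whole listing.
        if (loadedEventsCount >= MAX_EVENTS_TO_LOAD) {
            break;
        }
    }
    return events;
}

loadEvents().then((events) => {
    // Prints "loaded 1000 events" even though the listing is unbounded.
    console.log(`loaded ${events.length} events`);
});
```

Breaking out of the `for await` loop closes the async iterator, so once the cap is hit no further events (or, in the real code, further pages of the listing) should be requested.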
