Skip to content

Commit 890dd56

Browse files
authored
Reuse output panels (#150)
This commit:

- Re-uses output panels across workflow runs
- Moves cluster URL generation into the SDK
- Cleans up `job.html`
1 parent 45a3e82 commit 890dd56

File tree

8 files changed

+397
-293
lines changed

8 files changed

+397
-293
lines changed

packages/databricks-sdk-js/src/services/Cluster.test.ts

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -273,4 +273,40 @@ describe(__filename, function () {
273273
assert.deepEqual(cluster.dbrVersion, expectedDbr);
274274
}
275275
});
276+
277+
it("should return correct URLs", async () => {
278+
const mockedClient = mock(ApiClient);
279+
when(mockedClient.host).thenResolve(
280+
new URL("https://test.cloud.databricks.com")
281+
);
282+
const clusterDetails = {
283+
cluster_id: "1118-013127-82wynr8t",
284+
};
285+
const cluster = new Cluster(instance(mockedClient), clusterDetails);
286+
287+
assert.equal(
288+
await cluster.url,
289+
"https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/configuration"
290+
);
291+
292+
assert.equal(
293+
await cluster.driverLogsUrl,
294+
"https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/driverLogs"
295+
);
296+
297+
assert.equal(
298+
await cluster.metricsUrl,
299+
"https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/metrics"
300+
);
301+
302+
assert.equal(
303+
await cluster.getSparkUiUrl(),
304+
"https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/sparkUi"
305+
);
306+
307+
assert.equal(
308+
await cluster.getSparkUiUrl("7189805239423176682"),
309+
"https://test.cloud.databricks.com/#setting/sparkui/1118-013127-82wynr8t/driver-7189805239423176682"
310+
);
311+
});
276312
});

packages/databricks-sdk-js/src/services/Cluster.ts

Lines changed: 25 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,11 +42,35 @@ export class Cluster {
4242

4343
/**
 * URL of this cluster's configuration page in the workspace UI.
 *
 * Uses `URL.origin` instead of hard-coding the `https://` scheme so the
 * actual protocol (and any explicit port) of `client.host` is preserved.
 * For standard `https://` workspace hosts the resulting string is
 * identical to the previous `https://${host}` form.
 */
get url(): Promise<string> {
    return (async () =>
        `${(await this.client.host).origin}/#setting/clusters/${
            this.id
        }/configuration`)();
}
4949

50+
get driverLogsUrl(): Promise<string> {
51+
return (async () =>
52+
`https://${(await this.client.host).host}/#setting/clusters/${
53+
this.id
54+
}/driverLogs`)();
55+
}
56+
57+
get metricsUrl(): Promise<string> {
58+
return (async () =>
59+
`https://${(await this.client.host).host}/#setting/clusters/${
60+
this.id
61+
}/metrics`)();
62+
}
63+
64+
/**
 * URL of this cluster's Spark UI in the workspace.
 *
 * @param sparkContextId When given, links directly to the driver page of
 *     that Spark context; otherwise links to the cluster's Spark UI tab.
 * @returns Absolute URL as a string.
 */
async getSparkUiUrl(sparkContextId?: string): Promise<string> {
    // `origin` keeps the scheme and port of `client.host` instead of
    // hard-coding `https://`; `const` since it is never reassigned.
    const origin = (await this.client.host).origin;

    if (sparkContextId) {
        return `${origin}/#setting/sparkui/${this.id}/driver-${sparkContextId}`;
    } else {
        return `${origin}/#setting/clusters/${this.id}/sparkUi`;
    }
}
73+
5074
get memoryMb(): number | undefined {
5175
return this.clusterDetails.cluster_memory_mb;
5276
}

packages/databricks-vscode/src/cluster/ClusterListDataProvider.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ describe(__filename, () => {
120120
{
121121
contextValue: "databricks-link",
122122
description:
123-
"www.example.com/#setting/clusters/cluster-id-2/configuration",
123+
"https://www.example.com/#setting/clusters/cluster-id-2/configuration",
124124
label: "URL:",
125125
},
126126
{

packages/databricks-vscode/src/extension.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ import {
22
commands,
33
debug,
44
ExtensionContext,
5-
OutputChannel,
65
tasks,
76
window,
87
workspace,

packages/databricks-vscode/src/run/DabaricksWorkflowDebugAdapter.ts

Lines changed: 14 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import {
2323
import {DebugProtocol} from "@vscode/debugprotocol";
2424
import {ConnectionManager} from "../configuration/ConnectionManager";
2525
import {Subject} from "./Subject";
26-
import {runAsWorkflow} from "./WorkflowOutputPanel";
26+
import {WorkflowRunner} from "./WorkflowRunner";
2727
import {promptForClusterStart} from "./prompts";
2828
import {CodeSynchronizer} from "../sync/CodeSynchronizer";
2929

@@ -49,22 +49,27 @@ interface IAttachRequestArguments extends ILaunchRequestArguments {}
4949
export class DatabricksWorkflowDebugAdapterFactory
5050
implements DebugAdapterDescriptorFactory, Disposable
5151
{
52+
private workflowRunner: WorkflowRunner;
53+
5254
constructor(
    private connection: ConnectionManager,
    context: ExtensionContext,
    codeSynchronizer: CodeSynchronizer
) {
    // A single WorkflowRunner is created per factory and shared by every
    // debug session it produces (see createDebugAdapterDescriptor), which
    // is what lets output panels be reused between workflow runs.
    this.workflowRunner = new WorkflowRunner(context, codeSynchronizer);
}
5761

58-
dispose() {
    // Dispose the shared runner along with any resources it holds.
    this.workflowRunner.dispose();
}
5965

6066
createDebugAdapterDescriptor(
    _session: DebugSession
): ProviderResult<DebugAdapterDescriptor> {
    // Inline implementation: the adapter runs in-process rather than as a
    // separate executable. Each new session receives the factory's shared
    // WorkflowRunner so state persists across sessions.
    return new DebugAdapterInlineImplementation(
        new DatabricksWorkflowDebugSession(
            this.connection,
            this.workflowRunner
        )
    );
}
@@ -77,8 +82,7 @@ export class DatabricksWorkflowDebugSession extends LoggingDebugSession {
7782

7883
constructor(
    private connection: ConnectionManager,
    // Injected by the factory; shared across sessions rather than owned here.
    private workflowRunner: WorkflowRunner
) {
    super();
}
@@ -191,15 +195,13 @@ export class DatabricksWorkflowDebugSession extends LoggingDebugSession {
191195
return;
192196
}
193197

194-
await runAsWorkflow({
198+
await this.workflowRunner.run({
195199
program: Uri.file(program),
196200
parameters,
197201
args,
198202
cluster,
199203
syncDestination: syncDestination,
200-
context: this.context,
201204
token: this.token,
202-
codeSynchronizer: this.codeSynchronizer,
203205
});
204206
}
205207

0 commit comments

Comments
 (0)