Skip to content

Commit c28620a

Browse files
Move all dbconnect run options to Databricks Run Icon (#1066)
## Changes * The run icon has new options to run the current python file and debug it. * The run and debug options run with a bootstrap script (`dbconnect_bootstrap.py`) that * Sets up the env vars by recursing up the file tree and searching for .databricks.env * Sets up the Databricks globals ## Tests Manually verified the new run and debug options against a Databricks workspace.
1 parent bb63006 commit c28620a

File tree

16 files changed

+600
-391
lines changed

16 files changed

+600
-391
lines changed

packages/databricks-vscode/.vscodeignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,4 +15,5 @@ scripts/**
1515
coverage/
1616
.nyc_output/
1717
logs/
18-
extension/
18+
extension/
19+
**/*.vsix

packages/databricks-vscode/package.json

Lines changed: 45 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -135,17 +135,14 @@
135135
"command": "databricks.run.runEditorContentsAsWorkflow",
136136
"title": "Run File as Workflow",
137137
"category": "Databricks",
138-
"enablement": "!inDebugMode",
139-
"icon": {
140-
"dark": "resources/dark/logo.svg",
141-
"light": "resources/light/logo.svg"
142-
}
138+
"enablement": "!inDebugMode && databricks.context.activated && databricks.context.loggedIn",
139+
"icon": "$(play)"
143140
},
144141
{
145142
"command": "databricks.run.runEditorContents",
146143
"title": "Upload and Run File",
147144
"category": "Databricks",
148-
"enablement": "!inDebugMode",
145+
"enablement": "!inDebugMode && databricks.context.activated && databricks.context.loggedIn",
149146
"icon": "$(play)"
150147
},
151148
{
@@ -255,6 +252,18 @@
255252
"enablement": "databricks.context.activated",
256253
"category": "Databricks"
257254
},
255+
{
256+
"command": "databricks.run.dbconnect.debug",
257+
"title": "Databricks Connect: Debug current file",
258+
"enablement": "databricks.context.activated && databricks.context.loggedIn",
259+
"category": "Databricks"
260+
},
261+
{
262+
"command": "databricks.run.dbconnect.run",
263+
"title": "Databricks Connect: Run current file",
264+
"enablement": "databricks.context.activated && databricks.context.loggedIn",
265+
"category": "Databricks"
266+
},
258267
{
259268
"command": "databricks.bundle.showLogs",
260269
"title": "Show bundle logs",
@@ -444,6 +453,16 @@
444453
"command": "databricks.run.runEditorContentsAsWorkflow",
445454
"when": "resourceLangId == python || resourceLangId == scala || resourceLangId == r || resourceLangId == sql || resourceExtname == .ipynb",
446455
"group": "1_remote@2"
456+
},
457+
{
458+
"command": "databricks.run.dbconnect.debug",
459+
"when": "resourceLangId == python",
460+
"group": "2_local@1"
461+
},
462+
{
463+
"command": "databricks.run.dbconnect.run",
464+
"when": "resourceLangId == python",
465+
"group": "2_local@2"
447466
}
448467
],
449468
"commandPalette": [
@@ -474,11 +493,7 @@
474493
],
475494
"explorer/context": [
476495
{
477-
"command": "databricks.run.runEditorContents",
478-
"when": "resourceLangId == python"
479-
},
480-
{
481-
"command": "databricks.run.runEditorContentsAsWorkflow",
496+
"submenu": "databricks.run",
482497
"when": "resourceLangId == python"
483498
}
484499
]
@@ -493,7 +508,7 @@
493508
"id": "databricks.run",
494509
"label": "Run on Databricks",
495510
"icon": {
496-
"dark": "resources/dark/logo.svg",
511+
"dark": "resources/dark/databricks-run-icon.svg",
497512
"light": "resources/light/logo.svg"
498513
}
499514
}
@@ -637,6 +652,19 @@
637652
}
638653
}
639654
]
655+
},
656+
{
657+
"type": "python",
658+
"configurationAttributes": {
659+
"launch": {
660+
"properties": {
661+
"databricks": {
662+
"type": "boolean",
663+
"description": "Setup databricks environment variables and globals."
664+
}
665+
}
666+
}
667+
}
640668
}
641669
],
642670
"configuration": [
@@ -808,11 +836,11 @@
808836
"@typescript-eslint/parser": "^6.14.0",
809837
"@typescript-eslint/utils": "^6.14.0",
810838
"@vscode/test-electron": "^2.3.8",
811-
"@wdio/cli": "^8.26.1",
812-
"@wdio/local-runner": "^8.26.1",
813-
"@wdio/mocha-framework": "^8.24.12",
814-
"@wdio/spec-reporter": "^8.24.12",
815-
"@wdio/types": "^8.24.12",
839+
"@wdio/cli": "^8.32.3",
840+
"@wdio/local-runner": "^8.32.3",
841+
"@wdio/mocha-framework": "^8.32.3",
842+
"@wdio/spec-reporter": "^8.32.2",
843+
"@wdio/types": "^8.32.2",
816844
"chai": "^4.3.10",
817845
"esbuild": "^0.19.9",
818846
"eslint": "^8.55.0",
Lines changed: 18 additions & 0 deletions
Loading
Lines changed: 18 additions & 0 deletions
Loading
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
import os
2+
3+
# Load environment variables from <path>/.databricks/.databricks.env.
# We only look for the folder in the current working directory, since
# commands are launched from the project root — TODO confirm with caller.
def load_env_file_from_cwd(path: str) -> None:
    """Load KEY=VALUE pairs from path/.databricks/.databricks.env into os.environ.

    Returns silently when the directory or the env file does not exist.
    Blank lines, comment lines, and lines without '=' are skipped so a
    single malformed entry cannot abort the bootstrap.
    """
    if not os.path.isdir(path):
        return

    env_file_path = os.path.join(path, ".databricks", ".databricks.env")
    # Check the env file itself, not just its parent directory: the
    # previous dirname-only check let open() raise FileNotFoundError
    # when .databricks existed but the env file was missing.
    if not os.path.exists(env_file_path):
        return

    with open(env_file_path, "r") as f:
        for raw_line in f:
            line = raw_line.strip()
            if not line or line.startswith("#") or "=" not in line:
                continue
            # Split only on the first '=' so values may contain '='.
            key, value = line.split("=", 1)
            os.environ[key] = value
19+
20+
# Bootstrap for "Databricks Connect: Run/Debug current file": load env vars,
# set up the Databricks globals (dbutils, spark, sql, udf, getArgument), then
# execute the user script passed as argv[1] with those globals injected.
load_env_file_from_cwd(os.getcwd())

log_level = os.environ.get("DATABRICKS_VSCODE_LOG_LEVEL")
log_level = log_level if log_level is not None else "WARN"

import logging
logging.basicConfig(level=log_level)

db_globals = {}

from databricks.sdk.runtime import dbutils
db_globals['dbutils'] = dbutils

# "table", "sc", "sqlContext" are missing
try:
    from pyspark.sql import functions as udf, SparkSession
    from databricks.connect import DatabricksSession
    spark: SparkSession = DatabricksSession.builder.getOrCreate()
    sql = spark.sql
    db_globals['spark'] = spark
    db_globals['sql'] = sql
    db_globals['udf'] = udf
except Exception as e:
    # Best-effort: dbconnect may be unavailable; the script still runs
    # with dbutils/getArgument only.
    logging.debug(f"Failed to create DatabricksSession: {e}")

# We do this to prevent importing widgets implementation prematurely
# The widget import should prompt users to use the implementation
# which has ipywidget support.
def getArgument(*args, **kwargs):
    return dbutils.widgets.getArgument(*args, **kwargs)

db_globals['getArgument'] = getArgument

from runpy import run_path
import sys

# argv[0] is this bootstrap script and argv[1] is the user script; shift
# argv so the target sees itself as argv[0], matching normal execution.
script = sys.argv[1]
sys.argv = sys.argv[1:]
# Fix: the original logged f"Running ${script}" / f"args: ${...}" — the
# JS-style "${}" inside a Python f-string printed a stray literal "$".
logging.debug(f"Running {script}")
logging.debug(f"args: {sys.argv[1:]}")

run_path(script, init_globals=db_globals, run_name="__main__")

packages/databricks-vscode/src/bundle/models/BundlePreValidateModel.ts

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import {BaseModelWithStateCache} from "../../configuration/models/BaseModelWithS
55
import {UrlUtils} from "../../utils";
66
import {Mutex} from "../../locking";
77
import * as lodash from "lodash";
8+
import {withOnErrorHandler} from "../../utils/onErrorDecorator";
89

910
export type BundlePreValidateState = {
1011
host?: URL;
@@ -26,9 +27,14 @@ export class BundlePreValidateModel extends BaseModelWithStateCache<BundlePreVal
2627
) {
2728
super();
2829
this.disposables.push(
29-
this.bunldeFileWatcher.onDidChange(async () => {
30-
await this.stateCache.refresh();
31-
})
30+
this.bunldeFileWatcher.onDidChange(
31+
withOnErrorHandler(
32+
async () => {
33+
await this.stateCache.refresh();
34+
},
35+
{popup: false, log: true, throw: false}
36+
)
37+
)
3238
);
3339
}
3440

packages/databricks-vscode/src/configuration/ConnectionManager.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,9 @@
1-
import {WorkspaceClient, ApiClient, logging} from "@databricks/databricks-sdk";
1+
import {
2+
WorkspaceClient,
3+
ApiClient,
4+
logging,
5+
AuthType as SdkAuthType,
6+
} from "@databricks/databricks-sdk";
27
import {Cluster} from "../sdk-extensions";
38
import {EventEmitter, Uri, window, Disposable, commands} from "vscode";
49
import {CliWrapper, ProcessError} from "../cli/CliWrapper";
@@ -187,6 +192,9 @@ export class ConnectionManager implements Disposable {
187192
return this._workspaceClient?.apiClient;
188193
}
189194

195+
get authType(): SdkAuthType | undefined {
196+
return this.apiClient?.config.authType;
197+
}
190198
private async loginWithSavedAuth() {
191199
await this.disconnect();
192200
const authProvider = await this.resolveAuth();

packages/databricks-vscode/src/extension.ts

Lines changed: 37 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ import {BundleRunStatusManager} from "./bundle/run/BundleRunStatusManager";
5959
import {BundleProjectManager} from "./bundle/BundleProjectManager";
6060
import {TreeItemDecorationProvider} from "./ui/bundle-resource-explorer/DecorationProvider";
6161
import {BundleInitWizard} from "./bundle/BundleInitWizard";
62+
import {DatabricksDebugConfigurationProvider} from "./run/DatabricksDebugConfigurationProvider";
6263

6364
const customWhenContext = new CustomWhenContext();
6465

@@ -319,33 +320,31 @@ export async function activate(
319320
featureManager
320321
);
321322

323+
const databricksEnvFileManager = new DatabricksEnvFileManager(
324+
workspaceUri,
325+
featureManager,
326+
connectionManager,
327+
configModel
328+
);
329+
322330
const notebookInitScriptManager = new NotebookInitScriptManager(
323331
workspaceUri,
324332
context,
325333
connectionManager,
326334
featureManager,
327-
pythonExtensionWrapper
335+
pythonExtensionWrapper,
336+
databricksEnvFileManager,
337+
configModel
328338
);
329339

330340
context.subscriptions.push(
341+
dbConnectStatusBarButton,
331342
notebookInitScriptManager,
332343
telemetry.registerCommand(
333344
"databricks.notebookInitScript.verify",
334345
notebookInitScriptManager.verifyInitScriptCommand,
335346
notebookInitScriptManager
336-
)
337-
);
338-
339-
const databricksEnvFileManager = new DatabricksEnvFileManager(
340-
workspaceUri,
341-
featureManager,
342-
dbConnectStatusBarButton,
343-
connectionManager,
344-
context,
345-
notebookInitScriptManager
346-
);
347-
348-
context.subscriptions.push(
347+
),
349348
workspace.onDidOpenNotebookDocument(() =>
350349
featureManager.isEnabled("notebooks.dbconnect")
351350
),
@@ -572,7 +571,16 @@ export async function activate(
572571
);
573572

574573
// Run/debug group
575-
const runCommands = new RunCommands(connectionManager);
574+
const databricksDebugConfigurationProvider =
575+
new DatabricksDebugConfigurationProvider(context);
576+
577+
const runCommands = new RunCommands(
578+
connectionManager,
579+
workspace.workspaceFolders[0],
580+
pythonExtensionWrapper,
581+
featureManager,
582+
context
583+
);
576584
const debugFactory = new DatabricksDebugAdapterFactory(
577585
connectionManager,
578586
configModel,
@@ -587,6 +595,20 @@ export async function activate(
587595
);
588596

589597
context.subscriptions.push(
598+
debug.registerDebugConfigurationProvider(
599+
"python",
600+
databricksDebugConfigurationProvider
601+
),
602+
telemetry.registerCommand(
603+
"databricks.run.dbconnect.debug",
604+
runCommands.debugFileUsingDbconnect,
605+
runCommands
606+
),
607+
telemetry.registerCommand(
608+
"databricks.run.dbconnect.run",
609+
runCommands.runFileUsingDbconnect,
610+
runCommands
611+
),
590612
telemetry.registerCommand(
591613
"databricks.run.runEditorContents",
592614
runCommands.runEditorContentsCommand(),

0 commit comments

Comments
 (0)