diff --git a/CHANGELOG.md b/CHANGELOG.md index e7aa2961..5967770b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,95 @@ # Release History +## 1.5.0 + +### Highlights + +- Added OAuth M2M support (databricks/databricks-sql-nodejs#168, databricks/databricks-sql-nodejs#177) +- Added named query parameters support (databricks/databricks-sql-nodejs#162, databricks/databricks-sql-nodejs#175) +- `runAsync` options is now deprecated (databricks/databricks-sql-nodejs#176) +- Added staging ingestion support (databricks/databricks-sql-nodejs#164) + +### Databricks OAuth support + +Databricks OAuth support added in v1.4.0 is now extended with M2M flow. To use OAuth instead of PAT, pass +a corresponding auth provider type and options to `DBSQLClient.connect`: + +```ts +// instantiate DBSQLClient as usual + +client.connect({ + // provide other mandatory options as usual - e.g. host, path, etc. + authType: 'databricks-oauth', + oauthClientId: '...', // optional - overwrite default OAuth client ID + azureTenantId: '...', // optional - provide custom Azure tenant ID + persistence: ..., // optional - user-provided storage for OAuth tokens, should implement OAuthPersistence interface +}) +``` + +U2M flow involves user interaction - the library will open a browser tab asking user to log in. To use this flow, +no other options are required except for `authType`. + +M2M flow does not require any user interaction, and therefore is a good option, say, for scripting. To use this +flow, two extra options are required for `DBSQLClient.connect`: `oauthClientId` and `oauthClientSecret`. + +Also see [Databricks docs](https://docs.databricks.com/en/dev-tools/auth.html#oauth-machine-to-machine-m2m-authentication) +for more details about Databricks OAuth. + +### Named query parameters + +v1.5.0 adds a support for [query parameters](https://docs.databricks.com/en/sql/language-manual/sql-ref-parameter-marker.html). +Currently only named parameters are supported. + +Basic usage example: + +```ts +// obtain session object as usual + +const operation = session.executeStatement('SELECT :p1 AS "str_param", :p2 AS "number_param"', { + namedParameters: { + p1: 'Hello, World', + p2: 3.14, + }, +}); +``` + +The library will infer parameter types from passed primitive objects. Supported data types include booleans, various +numeric types (including native `BigInt` and `Int64` from `node-int64`), native `Date` type, and string. + +It's also possible to explicitly specify the parameter type by passing `DBSQLParameter` instances instead of primitive +values. It also allows one to use values that don't have a corresponding primitive representation: + +```ts +import { ..., DBSQLParameter, DBSQLParameterType } from '@databricks/sql'; + +// obtain session object as usual + +const operation = session.executeStatement('SELECT :p1 AS "date_param", :p2 AS "interval_type"', { + namedParameters: { + p1: new DBSQLParameter({ + value: new Date('2023-09-06T03:14:27.843Z'), + type: DBSQLParameterType.DATE, // by default, Date objects are inferred as TIMESTAMP, this allows to override the type + }), + p2: new DBSQLParameter({ + value: 5, // INTERVAL '5' DAY + type: DBSQLParameterType.INTERVALDAY + }), + }, +}); +``` + +Of course, you can mix primitive values and `DBSQLParameter` instances. + +### `runAsync` deprecation + +Starting with this release, the library will execute all queries asynchronously, so we have deprecated +the `runAsync` option. It will be completely removed in v2. So you should not use it going forward and remove all +the usages from your code before version 2 is released. From user's perspective the library behaviour won't change. + +### Data ingestion support + +This feature allows you to upload, retrieve, and remove unity catalog volume files using SQL `PUT`, `GET` and `REMOVE` commands. + ## 1.4.0 - Added Cloud Fetch support (databricks/databricks-sql-nodejs#158) diff --git a/lib/index.ts b/lib/index.ts index d39c9930..bf3b3d81 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -3,7 +3,7 @@ import TCLIService from '../thrift/TCLIService'; import TCLIService_types from '../thrift/TCLIService_types'; import DBSQLClient from './DBSQLClient'; import DBSQLSession from './DBSQLSession'; -import { DBSQLParameter } from './DBSQLParameter'; +import { DBSQLParameter, DBSQLParameterType } from './DBSQLParameter'; import DBSQLLogger from './DBSQLLogger'; import PlainHttpAuthentication from './connection/auth/PlainHttpAuthentication'; import HttpConnection from './connection/connections/HttpConnection'; @@ -32,4 +32,4 @@ export const utils = { formatProgress, }; -export { DBSQLClient, DBSQLSession, DBSQLParameter, DBSQLLogger, LogLevel }; +export { DBSQLClient, DBSQLSession, DBSQLParameter, DBSQLParameterType, DBSQLLogger, LogLevel }; diff --git a/package-lock.json b/package-lock.json index 10243f38..5ad9a71c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@databricks/sql", - "version": "1.4.0", + "version": "1.5.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@databricks/sql", - "version": "1.4.0", + "version": "1.5.0", "hasInstallScript": true, "license": "Apache 2.0", "dependencies": { diff --git a/package.json b/package.json index d73520f8..66569778 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@databricks/sql", - "version": "1.4.0", + "version": "1.5.0", "description": "Driver for connection to Databricks SQL via Thrift API.", "main": "dist/index.js", "types": "dist/index.d.ts", diff --git a/tests/e2e/query_parameters.test.js b/tests/e2e/query_parameters.test.js index d599390c..48df1ef8 100644 --- a/tests/e2e/query_parameters.test.js +++ b/tests/e2e/query_parameters.test.js @@ -1,7 +1,7 @@ const { expect } = require('chai'); const Int64 = require('node-int64'); const config = require('./utils/config'); -const { DBSQLClient, DBSQLParameter } = require('../..'); +const { DBSQLClient, DBSQLParameter, DBSQLParameterType } = require('../..'); const openSession = async () => { const client = new DBSQLClient(); @@ -18,7 +18,8 @@ const openSession = async () => { }); }; -describe('Query parameters', () => { +// TODO: Temporarily disable those tests until we figure out issues with E2E test env +describe.skip('Query parameters', () => { it('should use named parameters', async () => { const session = await openSession(); const operation = await session.executeStatement( @@ -40,7 +41,7 @@ describe('Query parameters', () => { p_double: new DBSQLParameter({ value: 3.14 }), p_bigint_1: new DBSQLParameter({ value: BigInt(1234) }), p_bigint_2: new DBSQLParameter({ value: new Int64(1234) }), - p_date: new DBSQLParameter({ value: new Date('2023-09-06T03:14:27.843Z'), type: 'DATE' }), + p_date: new DBSQLParameter({ value: new Date('2023-09-06T03:14:27.843Z'), type: DBSQLParameterType.DATE }), p_timestamp: new DBSQLParameter({ value: new Date('2023-09-06T03:14:27.843Z') }), p_str: new DBSQLParameter({ value: 'Hello' }), }, diff --git a/tests/e2e/staging_ingestion.test.js b/tests/e2e/staging_ingestion.test.js index 9459687a..77bd82ca 100644 --- a/tests/e2e/staging_ingestion.test.js +++ b/tests/e2e/staging_ingestion.test.js @@ -3,7 +3,7 @@ const config = require('./utils/config'); const { DBSQLClient } = require('../..'); const fs = require('fs'); -// TODO Temporarily disable those tests until we figure out issues with E2E test env +// TODO: Temporarily disable those tests until we figure out issues with E2E test env describe.skip('Staging Test', () => { it('put staging data and receive it', async () => { const client = new DBSQLClient();