diff --git a/Private/Add-DatabricksChunk.ps1 b/Private/Add-DatabricksChunk.ps1
new file mode 100644
index 0000000..a3b355e
--- /dev/null
+++ b/Private/Add-DatabricksChunk.ps1
@@ -0,0 +1,7 @@
+Function Add-DatabricksChunk([string]$part, [string]$InternalBearerToken, [string]$Region, $handle){
+    $Body = @{"data"=$part}
+    $Body['handle'] = $handle
+    $BodyText = $Body | ConvertTo-Json -Depth 10
+    Invoke-RestMethod -Uri "https://$Region.azuredatabricks.net/api/2.0/dbfs/add-block" -Body $BodyText -Method 'POST' -Headers @{Authorization = $InternalBearerToken}
+    Return
+}
\ No newline at end of file
diff --git a/Public/Add-DatabricksDBFSFile.ps1 b/Public/Add-DatabricksDBFSFile.ps1
index 3a667e8..121026e 100644
--- a/Public/Add-DatabricksDBFSFile.ps1
+++ b/Public/Add-DatabricksDBFSFile.ps1
@@ -39,14 +39,6 @@
 #>
 
-Function Add-DatabricksChunk([string]$part, [string]$InternalBearerToken, [string]$Region, $handle){
-    $Body = @{"data"=$part}
-    $Body['handle'] = $handle
-    $BodyText = $Body | ConvertTo-Json -Depth 10
-    Invoke-RestMethod -Uri "https://$Region.azuredatabricks.net/api/2.0/dbfs/add-block" -Body $BodyText -Method 'POST' -Headers @{Authorization = $InternalBearerToken}
-    Return
-}
-
 Function Add-DatabricksDBFSFile {
     [cmdletbinding()]
     param (
diff --git a/README.md b/README.md
index 18990b9..c21c1a2 100644
--- a/README.md
+++ b/README.md
@@ -105,10 +105,11 @@ Deploy a folder of scripts from a local folder (Git repo) to a specific folder i
 
 ## Jobs
 - Add-DatabricksNotebookJob - Schedule a job based on a Notebook.
-- Add-DatabricksNotebookJob - Schedule a job based on a Python script (stored in DBFS).
+- Add-DatabricksPythonJob - Schedule a job based on a Python script (stored in DBFS).
+- Add-DatabricksJarJob - Schedule a job based on a Jar (stored in DBFS).
+- Add-DatabricksSparkSubmitJob - Schedule a job based on a spark-submit command.
 - Remove-DatabricksJob
 
-Note: There is currently no support for Jar jobs or Spark Submit in this module - it may come in the future (please express an interest in Issues if you would like this). Python jobs do not work in Databricks (see the Jobs UI it is missing as an option). Generally in Azure we would recommend using ADF to execute jobs rather using Databricks jobs.
 
 ## Libraries
@@ -130,6 +131,8 @@ Deployment tasks exist here: https://marketplace.visualstudio.com/items?itemName
 
 Note that not all commandlets are available as tasks. Instead you may want to import the module and create PowerShell scripts that use these.
 
+## Contribute
+Contributions are welcomed! Please create a pull request with changes/additions.
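
For context, below is a minimal sketch of how the relocated Add-DatabricksChunk helper fits into the chunked upload that Add-DatabricksDBFSFile performs. The create, add-block, and close endpoints are the documented Databricks DBFS REST API 2.0 calls; the variable names ($LocalFile, $TargetPath, $ChunkSize) and the exact control flow are illustrative assumptions, not the module's actual implementation.

# Sketch only: drives the Add-DatabricksChunk helper shown in the diff above.
# $LocalFile, $TargetPath and $ChunkSize are hypothetical names for illustration.
$Headers = @{Authorization = $InternalBearerToken}
$BaseUri = "https://$Region.azuredatabricks.net/api/2.0/dbfs"

# 1. Open a streaming upload handle for the target DBFS path.
$CreateBody = @{path = $TargetPath; overwrite = $true} | ConvertTo-Json
$Handle = (Invoke-RestMethod -Uri "$BaseUri/create" -Method 'POST' -Headers $Headers -Body $CreateBody).handle

# 2. Send the file as base64-encoded blocks; a raw chunk well under 1 MB keeps the
#    encoded block within the add-block API's 1 MB data limit.
$Bytes = [System.IO.File]::ReadAllBytes($LocalFile)
$ChunkSize = 700KB
for ($Offset = 0; $Offset -lt $Bytes.Length; $Offset += $ChunkSize) {
    $Length = [Math]::Min($ChunkSize, $Bytes.Length - $Offset)
    $Part = [Convert]::ToBase64String($Bytes, $Offset, $Length)
    Add-DatabricksChunk -part $Part -InternalBearerToken $InternalBearerToken -Region $Region -handle $Handle
}

# 3. Close the handle to commit the file to DBFS.
$CloseBody = @{handle = $Handle} | ConvertTo-Json
Invoke-RestMethod -Uri "$BaseUri/close" -Method 'POST' -Headers $Headers -Body $CloseBody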