Skip to content

Commit

Permalink
implement secrets as well
Browse files Browse the repository at this point in the history
  • Loading branch information
mgyucht committed Aug 30, 2023
1 parent 4265bf4 commit 8662c98
Show file tree
Hide file tree
Showing 5 changed files with 56 additions and 24 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
out/
target/
.idea/
15 changes: 14 additions & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,9 @@
<url>https://github.com/databricks/databricks-dbutils-scala/blob/main/.github/workflows/pr.yml</url>
</ciManagement>


<properties>
<!-- Scala version is overridden by profiles -->
<scala.version>2.12.10</scala.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
Expand All @@ -59,6 +60,18 @@
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<!-- Nullable annotation -->
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>3.0.2</version>
</dependency>
<!-- Databricks SDK -->
<dependency>
<groupId>com.databricks</groupId>
<artifactId>databricks-sdk-java</artifactId>
<version>0.7.0</version>
</dependency>
</dependencies>

<distributionManagement>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package com.databricks.sdk.scala
package dbutils

import org.apache.hadoop.fs.FileSystem
import javax.annotation.Nullable

object DBUtils {
Expand Down Expand Up @@ -39,7 +38,7 @@ trait WithHelpMethods {
trait DbfsUtils extends Serializable with WithHelpMethods {

// Is this necessary?
def dbfs: FileSystem
// def dbfs: FileSystem

def ls(dir: String): Seq[FileInfo]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@ package dbutils

import Implicits._

import org.apache.hadoop.fs.FileSystem

object Implicits {
implicit class ReflectiveLookup(o: AnyRef) {
def getField[T](field: String): T = {
Expand Down Expand Up @@ -118,7 +116,6 @@ class ProxyDbfsUtils(fs: DBUtilsWrapper) extends DbfsUtils {

override def help(moduleOrMethod: String): Unit = fs.help(moduleOrMethod)

override def dbfs: FileSystem = fs.forField("dbfs").getUnderlying("dbfs")
override def ls(dir: String): Seq[FileInfo] = fs.invoke("ls", Seq(dir), p =>
p.asInstanceOf[Seq[AnyRef]].map { p =>
FileInfo(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
package com.databricks.sdk.scala
package dbutils

import scala.collection.JavaConverters._
import com.databricks.sdk.WorkspaceClient
import com.databricks.sdk.core.{DatabricksConfig, DatabricksError, DatabricksException}
import com.databricks.sdk.service.files.{Delete, Put, ReadDbfsRequest, UploadFileRequest}
import org.apache.hadoop.fs.FileSystem
import com.databricks.sdk.core.{DatabricksConfig, DatabricksError}
import com.databricks.sdk.service.files.UploadRequest

import java.io.{ByteArrayInputStream, InputStream}
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import scala.collection.JavaConverters._

object SdkDbfsUtilsImpl {
def unsupportedMethod(methodName: String): Nothing = {
Expand Down Expand Up @@ -38,7 +37,7 @@ class SdkDBUtilsImpl(config: DatabricksConfig) extends DBUtils with NoHelp {
override def meta: MetaUtils = SdkDbfsUtilsImpl.unsupportedField("meta")
override val fs: DbfsUtils = new SdkDbfsUtils(client)
override def notebook: NotebookUtils = SdkDbfsUtilsImpl.unsupportedField("notebook")
/** Secrets are served by the SDK-backed implementation rather than reported as unsupported. */
override def secrets: SecretUtils = new SdkSecretsUtils(client)
override def library: LibraryUtils = SdkDbfsUtilsImpl.unsupportedField("library")
override def credentials: DatabricksCredentialUtils = SdkDbfsUtilsImpl.unsupportedField("credentials")
override def jobs: JobsUtils = SdkDbfsUtilsImpl.unsupportedField("jobs")
Expand All @@ -48,11 +47,11 @@ class SdkDBUtilsImpl(config: DatabricksConfig) extends DBUtils with NoHelp {
class SdkDbfsUtils(w: WorkspaceClient) extends DbfsUtils with NoHelp {
override def ls(dir: String): Seq[FileInfo] = SdkDbfsUtilsImpl.unsupportedMethod("dbutils.fs.ls")

/** Deletes a single file through the Databricks Files API.
  *
  * @param file absolute path of the file to delete
  * @param recurse recursive deletion is not supported by this SDK-backed implementation
  * @return always true — the Files API returns no per-file status to report
  * @throws UnsupportedOperationException if `recurse` is true
  */
override def rm(file: String, recurse: Boolean): Boolean = {
  if (recurse) {
    throw new UnsupportedOperationException("Recursive delete is not yet supported in the SDK version of DBUtils.")
  }
  w.files().delete(file)
  // Should we list before and after? Swallow errors?
  true
}
Expand All @@ -67,10 +66,14 @@ class SdkDbfsUtils(w: WorkspaceClient) extends DbfsUtils with NoHelp {
}

/** Copies `from` to `to` via download + upload, optionally deleting the source.
  *
  * NOTE(review): the Files API has no atomic move, so this is copy-then-delete;
  * a failure between the upload and the delete leaves both copies in place.
  *
  * @param from source file path
  * @param to destination file path
  * @param recurse not used in this code path — TODO confirm intended semantics
  * @param delete when true, the source file is removed after a successful upload
  * @return always true — no meaningful status is available from the API
  */
private def mv(from: String, to: String, recurse: Boolean, delete: Boolean): Boolean = {
  val inputStream = w.files().download(from).getContents
  try {
    w.files().upload(new UploadRequest().setFilePath(to).setContents(inputStream))
    if (delete) {
      w.files().delete(from)
    }
  } finally {
    // Always release the download stream, even if the upload or delete fails.
    inputStream.close()
  }
  // What to return here?
  true
}
}

override def head(file: String, maxBytes: Int): String = {
val fileStream = w.files().downloadFile(file)
val fileStream = w.files().download(file).getContents
try {
val byteArray = new Array[Byte](maxBytes)
val numBytes = fileStream.read(byteArray)
Expand All @@ -97,11 +100,11 @@ class SdkDbfsUtils(w: WorkspaceClient) extends DbfsUtils with NoHelp {

override def put(file: String, contents: String, overwrite: Boolean): Boolean = {
try {
w.files().uploadFile(
new UploadFileRequest()
w.files().upload(
new UploadRequest()
.setFilePath(file)
.setOverwrite(overwrite)
.setBody(new ByteArrayInputStream(contents.getBytes(StandardCharsets.UTF_8)))
.setContents(new ByteArrayInputStream(contents.getBytes(StandardCharsets.UTF_8)))
)
} catch {
case e: DatabricksError if e.getMessage == "No matching namespace can be found" =>
Expand Down Expand Up @@ -130,6 +133,26 @@ class SdkDbfsUtils(w: WorkspaceClient) extends DbfsUtils with NoHelp {
// Mount management has no SDK-backed equivalent here; both calls report themselves unsupported.
override def mounts(): Seq[MountInfo] = SdkDbfsUtilsImpl.unsupportedMethod("dbutils.fs.mounts")

override def unmount(mountPoint: String): Boolean = SdkDbfsUtilsImpl.unsupportedMethod("dbutils.fs.unmount")
}

/** dbutils.secrets implementation backed by the Databricks SDK secrets API.
  *
  * Exposes only the read operations of the secrets surface; secret values are
  * fetched on demand from the workspace via the supplied [[WorkspaceClient]].
  * (The stray `dbfs` override that appeared here was a leftover deleted line
  * from the DbfsUtils diff — `SecretUtils` declares no such member and the
  * `FileSystem` import is removed by this commit, so it is dropped.)
  */
class SdkSecretsUtils(client: WorkspaceClient) extends SecretUtils with NoHelp {
  /** Returns the string value of the secret `key` in `scope`. */
  override def get(scope: String, key: String): String =
    client.secrets().get(scope, key)

  /** Returns the raw bytes of the secret `key` in `scope`. */
  override def getBytes(scope: String, key: String): Array[Byte] =
    client.secrets().getBytes(scope, key)

  /** Lists the secret keys in `scope`; values are never returned by list calls. */
  override def list(scope: String): Seq[SecretMetadata] =
    client.secrets().listSecrets(scope).asScala.toSeq.map { secret =>
      SecretMetadata(secret.getKey)
    }

  /** Lists all secret scopes visible to the authenticated principal. */
  override def listScopes(): Seq[SecretScope] =
    client.secrets().listScopes().asScala.toSeq.map { scope =>
      SecretScope(scope.getName)
    }
}

0 comments on commit 8662c98

Please sign in to comment.