Skip to content

Commit

Permalink
Merge branch 'release/2.5.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
seddonm1 committed Jan 5, 2021
2 parents 7efb3b3 + 60cdf73 commit 38a24a3
Show file tree
Hide file tree
Showing 8 changed files with 22 additions and 17 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,9 @@
# Change Log

## 2.5.0

- bump to Arc 3.7.0.

## 2.4.1

- set name on Dataframe when `persist=true` to help understand persisted datasets when using the Spark UI.
Expand Down
2 changes: 1 addition & 1 deletion project/Dependencies.scala
Expand Up @@ -13,7 +13,7 @@ object Dependencies {
val novocode = "com.novocode" % "junit-interface" % "0.11" % "test"

// arc
val arc = "ai.tripl" %% "arc" % "3.4.1" % "provided"
val arc = "ai.tripl" %% "arc" % "3.7.0" % "provided"

// spark
val sparkCatalyst = "org.apache.spark" %% "spark-catalyst" % sparkVersion % "provided"
Expand Down
2 changes: 2 additions & 0 deletions src/it/scala/ai/tripl/arc/util/TestUtils.scala
Expand Up @@ -55,6 +55,7 @@ object TestUtils {
inlineSQL: Boolean = true,
inlineSchema: Boolean = true,
dropUnsupported: Boolean = false,
completionEnvironments: List[String] = List("production", "test"),
)(implicit spark: SparkSession): ARCContext = {
val loader = ai.tripl.arc.util.Utils.getContextOrSparkClassLoader

Expand All @@ -80,6 +81,7 @@ object TestUtils {
serializableConfiguration=new SerializableConfiguration(spark.sparkContext.hadoopConfiguration),
userData=collection.mutable.Map.empty,
resolutionConfig=ConfigFactory.load(),
completionEnvironments=completionEnvironments,
)
}

Expand Down
9 changes: 4 additions & 5 deletions src/main/scala/ai/tripl/arc/extract/DeltaLakeExtract.scala
Expand Up @@ -39,16 +39,15 @@ class DeltaLakeExtract extends PipelineStagePlugin with JupyterCompleter {

val version = ai.tripl.arc.deltalake.BuildInfo.version

// Jupyter completion snippet for this stage: a JSON configuration template.
// The "environments" array is built from arcContext.completionEnvironments
// (each entry rendered as a quoted JSON string, comma-separated) instead of
// a hard-coded ["production", "test"] list, so completions match the
// environments configured on the running ARCContext.
def snippet()(implicit arcContext: ARCContext): String = {
  s"""{
  | "type": "DeltaLakeExtract",
  | "name": "DeltaLakeExtract",
  | "environments": [${arcContext.completionEnvironments.map { env => s""""${env}""""}.mkString(", ")}],
  | "inputURI": "hdfs://*.delta",
  | "outputView": "outputView"
  |}""".stripMargin
}

val documentationURI = new java.net.URI(s"${baseURI}/extract/#deltalakeextract")

Expand Down
9 changes: 4 additions & 5 deletions src/main/scala/ai/tripl/arc/load/DeltaLakeLoad.scala
Expand Up @@ -33,16 +33,15 @@ class DeltaLakeLoad extends PipelineStagePlugin with JupyterCompleter {

val version = ai.tripl.arc.deltalake.BuildInfo.version

// Jupyter completion snippet for this stage: a JSON configuration template.
// The "environments" array is built from arcContext.completionEnvironments
// (each entry rendered as a quoted JSON string, comma-separated) instead of
// a hard-coded ["production", "test"] list, so completions match the
// environments configured on the running ARCContext.
def snippet()(implicit arcContext: ARCContext): String = {
  s"""{
  | "type": "DeltaLakeLoad",
  | "name": "DeltaLakeLoad",
  | "environments": [${arcContext.completionEnvironments.map { env => s""""${env}""""}.mkString(", ")}],
  | "inputView": "inputView",
  | "outputURI": "hdfs://*.delta"
  |}""".stripMargin
}

val documentationURI = new java.net.URI(s"${baseURI}/load/#deltalakeload")

Expand Down
9 changes: 4 additions & 5 deletions src/main/scala/ai/tripl/arc/load/DeltaLakeMergeLoad.scala
Expand Up @@ -34,13 +34,11 @@ class DeltaLakeMergeLoad extends PipelineStagePlugin with JupyterCompleter {

val version = ai.tripl.arc.deltalake.BuildInfo.version

val snippet = """{
def snippet()(implicit arcContext: ARCContext): String = {
s"""{
| "type": "DeltaLakeMergeLoad",
| "name": "DeltaLakeMergeLoad",
| "environments": [
| "production",
| "test"
| ],
| "environments": [${arcContext.completionEnvironments.map { env => s""""${env}""""}.mkString(", ")}],
| "inputView": "inputView",
| "outputURI": "hdfs://*.delta",
| "condition": "source.primaryKey = target.primaryKey",
Expand All @@ -49,6 +47,7 @@ class DeltaLakeMergeLoad extends PipelineStagePlugin with JupyterCompleter {
| "whenNotMatchedByTargetInsert": {},
| "whenNotMatchedBySourceDelete": {}
|}""".stripMargin
}

val documentationURI = new java.net.URI(s"${baseURI}/load/#deltalakemergeload")

Expand Down
2 changes: 2 additions & 0 deletions src/test/scala/ai/tripl/arc/util/TestUtils.scala
Expand Up @@ -55,6 +55,7 @@ object TestUtils {
inlineSQL: Boolean = true,
inlineSchema: Boolean = true,
dropUnsupported: Boolean = false,
completionEnvironments: List[String] = List("production", "test"),
)(implicit spark: SparkSession): ARCContext = {
val loader = ai.tripl.arc.util.Utils.getContextOrSparkClassLoader

Expand All @@ -80,6 +81,7 @@ object TestUtils {
serializableConfiguration=new SerializableConfiguration(spark.sparkContext.hadoopConfiguration),
userData=collection.mutable.Map.empty,
resolutionConfig=ConfigFactory.load(),
completionEnvironments=completionEnvironments,
)
}

Expand Down
2 changes: 1 addition & 1 deletion version.sbt
@@ -1 +1 @@
version := "2.4.1"
version := "2.5.0"

0 comments on commit 38a24a3

Please sign in to comment.