diff --git a/build.sbt b/build.sbt
index e81a1bc26..42c751e41 100644
--- a/build.sbt
+++ b/build.sbt
@@ -2,7 +2,7 @@ import sbt.Keys.publishLocalConfiguration
 
 ThisBuild / resolvers += Resolver.mavenLocal
 ThisBuild / scalaVersion := "2.12.15"
-ThisBuild / version := "0.7.2"
+ThisBuild / version := "0.8.0"
 ThisBuild / organization := "com.linkedin.feathr"
 ThisBuild / organizationName := "linkedin"
 val sparkVersion = "3.1.3"
diff --git a/docs/how-to-guides/azure_resource_provision.json b/docs/how-to-guides/azure_resource_provision.json
index 8cc48ddc9..03d175052 100644
--- a/docs/how-to-guides/azure_resource_provision.json
+++ b/docs/how-to-guides/azure_resource_provision.json
@@ -111,7 +111,7 @@
     "destinationBacpacBlobUrl": "[concat('https://',variables('dlsName'),'.blob.core.windows.net/',variables('dlsFsName'),'/',variables('bacpacBlobName'))]",
     "bacpacDeploymentScriptName": "CopyBacpacFile",
     "bacpacDbExtensionName": "registryRbacDbImport",
-    "preBuiltdockerImage": "feathrfeaturestore/feathr-registry:releases-v0.7.2"
+    "preBuiltdockerImage": "feathrfeaturestore/feathr-registry:releases-v0.8.0"
   },
   "functions": [],
   "resources": [
diff --git a/docs/how-to-guides/local-spark-provider.md b/docs/how-to-guides/local-spark-provider.md
index b7206d388..433af64f3 100644
--- a/docs/how-to-guides/local-spark-provider.md
+++ b/docs/how-to-guides/local-spark-provider.md
@@ -36,7 +36,7 @@ A spark-submit script will auto generated in your workspace under `debug` folder
 spark-submit \
     --master local[*] \
     --name project_feathr_local_spark_test \
-    --packages "org.apache.spark:spark-avro_2.12:3.3.0,com.microsoft.sqlserver:mssql-jdbc:10.2.0.jre8,com.microsoft.azure:spark-mssql-connector_2.12:1.2.0,org.apache.logging.log4j:log4j-core:2.17.2,com.typesafe:config:1.3.4,com.fasterxml.jackson.core:jackson-databind:2.12.6.1,org.apache.hadoop:hadoop-mapreduce-client-core:2.7.7,org.apache.hadoop:hadoop-common:2.7.7,org.apache.avro:avro:1.8.2,org.apache.xbean:xbean-asm6-shaded:4.10,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,com.microsoft.azure:azure-eventhubs-spark_2.12:2.3.21,org.apache.kafka:kafka-clients:3.1.0,com.google.guava:guava:31.1-jre,it.unimi.dsi:fastutil:8.1.1,org.mvel:mvel2:2.2.8.Final,com.fasterxml.jackson.module:jackson-module-scala_2.12:2.13.3,com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.12.6,com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.6,com.jasonclawson:jackson-dataformat-hocon:1.1.0,com.redislabs:spark-redis_2.12:3.1.0,org.apache.xbean:xbean-asm6-shaded:4.10,com.google.protobuf:protobuf-java:3.19.4,net.snowflake:snowflake-jdbc:3.13.18,net.snowflake:spark-snowflake_2.12:2.10.0-spark_3.2,org.apache.commons:commons-lang3:3.12.0,org.xerial:sqlite-jdbc:3.36.0.3,com.github.changvvb:jackson-module-caseclass_2.12:1.1.1,com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.11.1,org.eclipse.jetty:jetty-util:9.3.24.v20180605,commons-io:commons-io:2.6,org.apache.hadoop:hadoop-azure:2.7.4,com.microsoft.azure:azure-storage:8.6.4,com.linkedin.feathr:feathr_2.12:0.7.2" \
+    --packages "org.apache.spark:spark-avro_2.12:3.3.0,com.microsoft.sqlserver:mssql-jdbc:10.2.0.jre8,com.microsoft.azure:spark-mssql-connector_2.12:1.2.0,org.apache.logging.log4j:log4j-core:2.17.2,com.typesafe:config:1.3.4,com.fasterxml.jackson.core:jackson-databind:2.12.6.1,org.apache.hadoop:hadoop-mapreduce-client-core:2.7.7,org.apache.hadoop:hadoop-common:2.7.7,org.apache.avro:avro:1.8.2,org.apache.xbean:xbean-asm6-shaded:4.10,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,com.microsoft.azure:azure-eventhubs-spark_2.12:2.3.21,org.apache.kafka:kafka-clients:3.1.0,com.google.guava:guava:31.1-jre,it.unimi.dsi:fastutil:8.1.1,org.mvel:mvel2:2.2.8.Final,com.fasterxml.jackson.module:jackson-module-scala_2.12:2.13.3,com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.12.6,com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.6,com.jasonclawson:jackson-dataformat-hocon:1.1.0,com.redislabs:spark-redis_2.12:3.1.0,org.apache.xbean:xbean-asm6-shaded:4.10,com.google.protobuf:protobuf-java:3.19.4,net.snowflake:snowflake-jdbc:3.13.18,net.snowflake:spark-snowflake_2.12:2.10.0-spark_3.2,org.apache.commons:commons-lang3:3.12.0,org.xerial:sqlite-jdbc:3.36.0.3,com.github.changvvb:jackson-module-caseclass_2.12:1.1.1,com.azure.cosmos.spark:azure-cosmos-spark_3-1_2-12:4.11.1,org.eclipse.jetty:jetty-util:9.3.24.v20180605,commons-io:commons-io:2.6,org.apache.hadoop:hadoop-azure:2.7.4,com.microsoft.azure:azure-storage:8.6.4,com.linkedin.feathr:feathr_2.12:0.8.0" \
     --conf "spark.driver.extraClassPath=../target/scala-2.12/classes:jars/config-1.3.4.jar:jars/jackson-dataformat-hocon-1.1.0.jar:jars/jackson-module-caseclass_2.12-1.1.1.jar:jars/mvel2-2.2.8.Final.jar:jars/fastutil-8.1.1.jar" \
     --conf "spark.hadoop.fs.wasbs.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem" \
     --class com.linkedin.feathr.offline.job.FeatureJoinJob \
diff --git a/feathr_project/docs/conf.py b/feathr_project/docs/conf.py
index 4708ca78d..7f5dd35f3 100644
--- a/feathr_project/docs/conf.py
+++ b/feathr_project/docs/conf.py
@@ -24,9 +24,9 @@
 author = 'Feathr Community'
 
 # The short X.Y version
-version = '0.7'
+version = '0.8'
 # The full version, including alpha/beta/rc tags
-release = '0.7'
+release = '0.8'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/feathr_project/feathr/constants.py b/feathr_project/feathr/constants.py
index 2da8b4049..6686f14ac 100644
--- a/feathr_project/feathr/constants.py
+++ b/feathr_project/feathr/constants.py
@@ -28,7 +28,7 @@
 TYPEDEF_ARRAY_DERIVED_FEATURE=f"array"
 TYPEDEF_ARRAY_ANCHOR_FEATURE=f"array"
 
-FEATHR_MAVEN_ARTIFACT="com.linkedin.feathr:feathr_2.12:0.7.2"
+FEATHR_MAVEN_ARTIFACT="com.linkedin.feathr:feathr_2.12:0.8.0"
 
 JOIN_CLASS_NAME="com.linkedin.feathr.offline.job.FeatureJoinJob"
 GEN_CLASS_NAME="com.linkedin.feathr.offline.job.FeatureGenJob"
\ No newline at end of file
diff --git a/feathr_project/setup.py b/feathr_project/setup.py
index bcb900cf7..79a3c3190 100644
--- a/feathr_project/setup.py
+++ b/feathr_project/setup.py
@@ -7,7 +7,7 @@
 
 setup(
     name='feathr',
-    version='0.7.2',
+    version='0.8.0',
     long_description=long_description,
     long_description_content_type="text/markdown",
     author_email="feathr-technical-discuss@lists.lfaidata.foundation",
diff --git a/feathr_project/test/test_user_workspace/feathr_config.yaml b/feathr_project/test/test_user_workspace/feathr_config.yaml
index ba04466ad..e67c803ef 100644
--- a/feathr_project/test/test_user_workspace/feathr_config.yaml
+++ b/feathr_project/test/test_user_workspace/feathr_config.yaml
@@ -82,7 +82,7 @@ spark_config:
     # Feathr Job configuration. Support local paths, path start with http(s)://, and paths start with abfs(s)://
     # this is the default location so end users don't have to compile the runtime again.
     # feathr_runtime_location: wasbs://public@azurefeathrstorage.blob.core.windows.net/feathr-assembly-LATEST.jar
-    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
+    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
   databricks:
     # workspace instance
     workspace_instance_url: 'https://adb-2474129336842816.16.azuredatabricks.net/'
@@ -93,7 +93,7 @@ spark_config:
     # Feathr Job location. Support local paths, path start with http(s)://, and paths start with dbfs:/
     work_dir: 'dbfs:/feathr_getting_started'
     # this is the default location so end users don't have to compile the runtime again.
-    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
+    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
 
 online_store:
   redis:
diff --git a/feathr_project/test/test_user_workspace/feathr_config_purview.yaml b/feathr_project/test/test_user_workspace/feathr_config_purview.yaml
index 2d52dda22..8785dfdec 100644
--- a/feathr_project/test/test_user_workspace/feathr_config_purview.yaml
+++ b/feathr_project/test/test_user_workspace/feathr_config_purview.yaml
@@ -82,7 +82,7 @@ spark_config:
     # Feathr Job configuration. Support local paths, path start with http(s)://, and paths start with abfs(s)://
     # this is the default location so end users don't have to compile the runtime again.
     # feathr_runtime_location: wasbs://public@azurefeathrstorage.blob.core.windows.net/feathr-assembly-LATEST.jar
-    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
+    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
   databricks:
     # workspace instance
     workspace_instance_url: 'https://adb-2474129336842816.16.azuredatabricks.net/'
@@ -93,7 +93,7 @@ spark_config:
     # Feathr Job location. Support local paths, path start with http(s)://, and paths start with dbfs:/
     work_dir: 'dbfs:/feathr_getting_started'
     # this is the default location so end users don't have to compile the runtime again.
-    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.7.2.jar"
+    feathr_runtime_location: "../../target/scala-2.12/feathr-assembly-0.8.0.jar"
 
 online_store:
  redis:
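Not part of the patch itself, but a quick post-bump sanity check: `feathr_project/feathr/constants.py` (changed above) exports the Maven coordinate that the Python client hands to Spark, so a one-liner against an installed client can confirm it now points at 0.8.0. This is an illustrative sketch only; the `pip install` step and the assertion are assumptions, not something this diff adds.

```python
# Hypothetical verification script, assuming `pip install feathr==0.8.0` has been run.
# FEATHR_MAVEN_ARTIFACT is the constant bumped in feathr_project/feathr/constants.py above.
from feathr.constants import FEATHR_MAVEN_ARTIFACT

# The coordinate the client submits to Spark should carry the new runtime version.
assert FEATHR_MAVEN_ARTIFACT == "com.linkedin.feathr:feathr_2.12:0.8.0", FEATHR_MAVEN_ARTIFACT
print(f"Feathr runtime artifact: {FEATHR_MAVEN_ARTIFACT}")
```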