From 544ea067a0715d7af0376bbbae649f7dd48e9436 Mon Sep 17 00:00:00 2001
From: Dan Choi
Date: Tue, 24 Apr 2018 17:08:37 -0700
Subject: [PATCH] Bump version to 1.0.5

---
 CHANGELOG.rst                  | 4 ++--
 sagemaker-pyspark-sdk/setup.py | 2 +-
 sagemaker-spark-sdk/build.sbt  | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 24883be..83caaa7 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -2,8 +2,8 @@
 CHANGELOG
 =========
 
-1.0.dev5
-========
+1.0.5
+=====
 
 * pyspark: SageMakerModel: Fix bugs in creating model from training job, s3 file and endpoint
 * spark/pyspark: XGBoostSageMakerEstimator: Fix seed hyperparameter to use correct type (Int)

diff --git a/sagemaker-pyspark-sdk/setup.py b/sagemaker-pyspark-sdk/setup.py
index 25b1e65..96147c4 100644
--- a/sagemaker-pyspark-sdk/setup.py
+++ b/sagemaker-pyspark-sdk/setup.py
@@ -7,7 +7,7 @@
 
 from setuptools import setup
 
-VERSION = "1.0.4"
+VERSION = "1.0.5"
 TEMP_PATH = "deps"
 JARS_TARGET = os.path.join(TEMP_PATH, "jars")
 

diff --git a/sagemaker-spark-sdk/build.sbt b/sagemaker-spark-sdk/build.sbt
index 1aea845..a17bbdd 100644
--- a/sagemaker-spark-sdk/build.sbt
+++ b/sagemaker-spark-sdk/build.sbt
@@ -19,7 +19,7 @@ scalaVersion := "2.11.7"
 
 // to change the version of spark add -DSPARK_VERSION=2.x.x when running sbt
 // for example: "sbt -DSPARK_VERSION=2.1.1 clean compile test doc package"
 val sparkVersion = System.getProperty("SPARK_VERSION", "2.2.0")
-version := "spark_" + sparkVersion + "-1.0.4"
+version := "spark_" + sparkVersion + "-1.0.5"
 
 lazy val SageMakerSpark = (project in file("."))