Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[GH-5676][GH-5679][GH-5619] Add support for Python 3.10 & 3.11 #5677

Merged
merged 12 commits on Oct 24, 2023
2 changes: 1 addition & 1 deletion build.gradle
Expand Up @@ -79,7 +79,7 @@ ext {
docProjects = [project(':sparkling-water-doc')]

scalaTestVersions = ['2.11': '2.2.1', '2.12': '3.0.8']
numpyVersions = ['2': '1.16.6', '3': '1.19.5']
numpyVersions = ['3.6': '1.19.5', '3.7': '1.19.5', 'default': '1.23.5']
}

def versionSpecificProps = new Properties()
Expand Down
2 changes: 1 addition & 1 deletion gradle-spark3.0.properties
Expand Up @@ -7,4 +7,4 @@ databricksVersion=7.3.x-cpu-ml-scala2.12
fabricK8sClientVersion=4.9.2
executorOverheadMemoryOption=spark.executor.memoryOverhead
driverOverheadMemoryOption=spark.driver.memoryOverhead
supportedPythonVersions=3.6 3.7 3.8 3.9
supportedPythonVersions=3.6 3.7 3.8 3.9 3.10
2 changes: 1 addition & 1 deletion gradle-spark3.1.properties
Expand Up @@ -7,4 +7,4 @@ databricksVersion=9.1.x-cpu-ml-scala2.12
fabricK8sClientVersion=4.12.0
executorOverheadMemoryOption=spark.executor.memoryOverhead
driverOverheadMemoryOption=spark.driver.memoryOverhead
supportedPythonVersions=3.6 3.7 3.8 3.9
supportedPythonVersions=3.6 3.7 3.8 3.9 3.10
2 changes: 1 addition & 1 deletion gradle-spark3.2.properties
Expand Up @@ -7,4 +7,4 @@ databricksVersion=10.4.x-cpu-ml-scala2.12
fabricK8sClientVersion=5.4.1
executorOverheadMemoryOption=spark.executor.memoryOverhead
driverOverheadMemoryOption=spark.driver.memoryOverhead
supportedPythonVersions=3.6 3.7 3.8 3.9
supportedPythonVersions=3.6 3.7 3.8 3.9 3.10
2 changes: 1 addition & 1 deletion gradle-spark3.3.properties
Expand Up @@ -7,4 +7,4 @@ databricksVersion=11.0.x-cpu-ml-scala2.12
fabricK8sClientVersion=5.12.2
executorOverheadMemoryOption=spark.executor.memoryOverhead
driverOverheadMemoryOption=spark.driver.memoryOverhead
supportedPythonVersions=3.7 3.8 3.9
supportedPythonVersions=3.7 3.8 3.9 3.10
2 changes: 1 addition & 1 deletion gradle-spark3.4.properties
Expand Up @@ -7,4 +7,4 @@ databricksVersion=13.0.x-cpu-ml-scala2.12
fabricK8sClientVersion=6.4.1
executorOverheadMemoryOption=spark.executor.memoryOverhead
driverOverheadMemoryOption=spark.driver.memoryOverhead
supportedPythonVersions=3.7 3.8 3.9
supportedPythonVersions=3.7 3.8 3.9 3.10 3.11
4 changes: 2 additions & 2 deletions gradle.properties
Expand Up @@ -19,13 +19,13 @@ systemProp.org.gradle.internal.publish.checksums.insecure=true
# Version of Terraform used in the script creating the docker image
terraformVersion=0.12.25
# Version of docker image used in Jenkins tests
dockerImageVersion=80
dockerImageVersion=83
# Is this build nightly build
isNightlyBuild=false
# Supported Major Spark Versions
supportedSparkVersions=2.3 2.4 3.0 3.1 3.2 3.3 3.4
# The list of python environments used in automated tests
pythonEnvironments=3.6 3.7 3.8 3.9
pythonEnvironments=3.6 3.7 3.8 3.9 3.10 3.11
# Select for which Spark version is Sparkling Water built by default
spark=3.4
# Sparkling Water Version
Expand Down
13 changes: 7 additions & 6 deletions py-scoring/build.gradle
Expand Up @@ -27,18 +27,19 @@ python {
if (project.hasProperty("pythonPath")) {
pythonPath project.findProperty("pythonPath").toString()
}
def pythonMajorVersion = getPythonVersion().substring(0, 1)
def numpyVersion = numpyVersions.get(pythonMajorVersion)
String pythonVersion = getPythonVersion()
String pythonVersionWithoutPatchPart = pythonVersion.substring(0, pythonVersion.lastIndexOf('.'))
String numpyVersion = numpyVersions.get(pythonVersionWithoutPatchPart, numpyVersions.get("default"))

pip "pytz:2019.1" // Needed in Integration tests, but not PySparkling dependency
pip "pytest:4.6.9" // For running tests
pip "pytz:2023.3.post1" // Needed in Integration tests, but not PySparkling dependency
pip "pytest:6.2.5"
pip "numpy:${numpyVersion}"
pip "pyspark:${sparkVersion}"
if (project.hasProperty("pythonEnvBasePath")) {
// for CI as we use pre-cached environment
envPath "${project.findProperty("pythonEnvBasePath")}/${getPythonVersion()}/${sparkVersion}"
envPath "${project.findProperty("pythonEnvBasePath")}/${pythonVersion}/${sparkVersion}"
} else {
envPath "${rootDir}/.gradle/python/${getPythonVersion()}/${sparkVersion}"
envPath "${rootDir}/.gradle/python/${pythonVersion}/${sparkVersion}"
}
}

Expand Down
12 changes: 6 additions & 6 deletions py/build.gradle
Expand Up @@ -28,13 +28,13 @@ python {
if (project.hasProperty("pythonPath")) {
pythonPath project.findProperty("pythonPath").toString()
}
def pythonMajorVersion = getPythonVersion().substring(0, 1)
def numpyVersion = numpyVersions.get(pythonMajorVersion)
String pythonVersionWithoutPatchPart = pythonVersion.substring(0, pythonVersion.lastIndexOf('.'))
String numpyVersion = numpyVersions.get(pythonVersionWithoutPatchPart, numpyVersions.get("default"))

pip "pytz:2019.1" // Needed in Integration tests, but not PySparkling dependency
pip "pytest:4.6.9" // For running tests
pip "tabulate:0.8.3"
pip "requests:2.21.0"
pip "pytz:2023.3.post1" // Needed in Integration tests, but not PySparkling dependency
pip "pytest:6.2.5"
pip "tabulate:0.8.10"
pip "requests:2.27.1"
pip "numpy:${numpyVersion}"
pip "pyspark:${sparkVersion}"
if (project.hasProperty("pythonEnvBasePath")) {
Expand Down