Skip to content

Commit

Permalink
Updated SingleStore and Spark versions (#82)
Browse files Browse the repository at this point in the history
Updated SingleStore and Spark versions

Deleted 3.0, 3.1, and 3.2 Spark versions from testing.
Added 8.5 SingleStore to testing, and deleted old SingleStore versions.
---------

Co-authored-by: Adalbert Makarovych <amakarovych0ua@singlestore.com>
  • Loading branch information
AdalbertMemSQL and Adalbert Makarovych committed Feb 23, 2024
1 parent c0955b9 commit c37e884
Show file tree
Hide file tree
Showing 23 changed files with 115 additions and 1,362 deletions.
44 changes: 10 additions & 34 deletions .circleci/config.yml
Expand Up @@ -49,19 +49,10 @@ jobs:
name: Run tests
command: |
export SINGLESTORE_HOST=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' singlestore-integration)
if [ << parameters.spark_version >> == '3.0.3' ]
then
sbt ++2.12.12 "testOnly -- -l ExcludeFromSpark30" -Dspark.version=<< parameters.spark_version >>
elif [ << parameters.spark_version >> == '3.1.3' ]
then
sbt ++2.12.12 "testOnly -- -l ExcludeFromSpark31" -Dspark.version=<< parameters.spark_version >>
elif [ << parameters.spark_version >> == '3.2.4' ]
then
sbt ++2.12.12 "testOnly -- -l ExcludeFromSpark32" -Dspark.version=<< parameters.spark_version >>
elif [ << parameters.spark_version >> == '3.3.3' ]
if [ << parameters.spark_version >> == '3.3.4' ]
then
sbt ++2.12.12 "testOnly -- -l ExcludeFromSpark33" -Dspark.version=<< parameters.spark_version >>
elif [ << parameters.spark_version >> == '3.4.1' ]
elif [ << parameters.spark_version >> == '3.4.2' ]
then
sbt ++2.12.12 "testOnly -- -l ExcludeFromSpark34" -Dspark.version=<< parameters.spark_version >>
else
Expand All @@ -82,25 +73,13 @@ jobs:
openssl enc -d -aes-256-cbc -K ${ENCRYPTION_KEY} -iv ${ENCRYPTION_IV} -in ci/secring.asc.enc -out ci/secring.asc
gpg --import ci/secring.asc
- run:
name: Publish Spark 3.0.3
command: |
sbt ++2.12.12 -Dspark.version=3.0.3 clean publishSigned sonatypeBundleRelease
- run:
name: Publish Spark 3.1.3
command: |
sbt ++2.12.12 -Dspark.version=3.1.3 clean publishSigned sonatypeBundleRelease
- run:
name: Publish Spark 3.2.4
command: |
sbt ++2.12.12 -Dspark.version=3.2.4 clean publishSigned sonatypeBundleRelease
- run:
name: Publish Spark 3.3.3
name: Publish Spark 3.3.4
command: |
sbt ++2.12.12 -Dspark.version=3.3.3 clean publishSigned sonatypeBundleRelease
sbt ++2.12.12 -Dspark.version=3.3.4 clean publishSigned sonatypeBundleRelease
- run:
name: Publish Spark 3.4.1
name: Publish Spark 3.4.2
command: |
sbt ++2.12.12 -Dspark.version=3.4.1 clean publishSigned sonatypeBundleRelease
sbt ++2.12.12 -Dspark.version=3.4.2 clean publishSigned sonatypeBundleRelease
- run:
name: Publish Spark 3.5.0
command: |
Expand All @@ -118,17 +97,14 @@ workflows:
matrix:
parameters:
spark_version:
- 3.0.3
- 3.1.3
- 3.2.4
- 3.3.3
- 3.4.1
- 3.3.4
- 3.4.2
- 3.5.0
singlestore_image:
- singlestore/cluster-in-a-box:alma-7.6.27-51e282b615-4.0.12-1.16.1
- singlestore/cluster-in-a-box:alma-7.8.19-4263b2d130-4.0.10-1.14.4
- singlestore/cluster-in-a-box:alma-8.0.19-f48780d261-4.0.11-1.16.0
- singlestore/cluster-in-a-box:alma-8.1.26-810da32787-4.0.14-1.17.4
- singlestore/cluster-in-a-box:alma-8.1.32-e3d3cde6da-4.0.16-1.17.6
- singlestore/cluster-in-a-box:alma-8.5.7-bf633c1a54-4.0.17-1.17.8
publish:
jobs:
- approve-publish:
Expand Down
4 changes: 2 additions & 2 deletions Layerfile
Expand Up @@ -39,9 +39,9 @@ MEMORY 8G
MEMORY 12G
MEMORY 16G

# split to 25 states
# split to 13 states
# each of them will run different version of the singlestore and spark
SPLIT 25
SPLIT 13

# copy the entire git repository
COPY . .
Expand Down
14 changes: 4 additions & 10 deletions build.sbt
Expand Up @@ -8,11 +8,8 @@ val sparkVersion = sys.props.get("spark.version").getOrElse("3.5.0")
val scalaVersionStr = "2.12.12"
val scalaVersionPrefix = scalaVersionStr.substring(0, 4)
val jacksonDatabindVersion = sparkVersion match {
case "3.0.3" => "2.10.0"
case "3.1.3" => "2.10.0"
case "3.2.4" => "2.12.3"
case "3.3.3" => "2.13.4.2"
case "3.4.1" => "2.14.2"
case "3.3.4" => "2.13.4.2"
case "3.4.2" => "2.14.2"
case "3.5.0" => "2.15.2"
}

Expand All @@ -25,11 +22,8 @@ lazy val root = project
organization := "com.singlestore",
scalaVersion := scalaVersionStr,
Compile / unmanagedSourceDirectories += (Compile / sourceDirectory).value / (sparkVersion match {
case "3.0.3" => "scala-sparkv3.0"
case "3.1.3" => "scala-sparkv3.1"
case "3.2.4" => "scala-sparkv3.2"
case "3.3.3" => "scala-sparkv3.3"
case "3.4.1" => "scala-sparkv3.4"
case "3.3.4" => "scala-sparkv3.3"
case "3.4.2" => "scala-sparkv3.4"
case "3.5.0" => "scala-sparkv3.5"
}),
version := s"4.1.5-spark-${sparkVersion}",
Expand Down
65 changes: 25 additions & 40 deletions scripts/define-layerci-matrix.sh
@@ -1,52 +1,37 @@
#!/usr/bin/env bash
set -eu

SINGLESTORE_IMAGE_TAGS=(
"alma-7.8.19-4263b2d130-4.0.10-1.14.4"
"alma-8.0.19-f48780d261-4.0.11-1.16.0"
"alma-8.1.32-e3d3cde6da-4.0.16-1.17.6"
"alma-8.5.7-bf633c1a54-4.0.17-1.17.8"
)
SINGLESTORE_IMAGE_TAGS_COUNT=${#SINGLESTORE_IMAGE_TAGS[@]}
SPARK_VERSIONS=(
"3.5.0"
"3.4.2"
"3.3.4"
)
SPARK_VERSIONS_COUNT=${#SPARK_VERSIONS[@]}

TEST_NUM=${SPLIT:-"0"}

if [ "$TEST_NUM" == '0' ] || [ "$TEST_NUM" == '1' ] || [ "$TEST_NUM" == '2' ] || [ "$TEST_NUM" == '3' ] || [ "$TEST_NUM" == '4' ] || [ "$TEST_NUM" == '5' ]
then
echo 'export SINGLESTORE_IMAGE="singlestore/cluster-in-a-box:alma-7.6.27-51e282b615-4.0.12-1.16.1"'
elif [ "$TEST_NUM" == '6' ] || [ "$TEST_NUM" == '7' ] || [ "$TEST_NUM" == '8' ] || [ "$TEST_NUM" == '9' ] || [ "$TEST_NUM" == '10' ] || [ "$TEST_NUM" == '11' ]
then
echo 'export SINGLESTORE_IMAGE="singlestore/cluster-in-a-box:alma-7.8.19-4263b2d130-4.0.10-1.14.4"'
elif [ "$TEST_NUM" == '12' ] || [ "$TEST_NUM" == '13' ] || [ "$TEST_NUM" == '14' ] || [ "$TEST_NUM" == '15' ] || [ "$TEST_NUM" == '16' ] || [ "$TEST_NUM" == '17' ]
then
echo 'export SINGLESTORE_IMAGE="singlestore/cluster-in-a-box:alma-8.0.19-f48780d261-4.0.11-1.16.0"'
else
echo 'export SINGLESTORE_IMAGE="singlestore/cluster-in-a-box:alma-8.1.26-810da32787-4.0.14-1.17.4"'
fi
SINGLESTORE_IMAGE_TAG_INDEX=$(( $TEST_NUM / $SPARK_VERSIONS_COUNT))
SINGLESTORE_IMAGE_TAG_INDEX=$((SINGLESTORE_IMAGE_TAG_INDEX>=SINGLESTORE_IMAGE_TAGS_COUNT ? SINGLESTORE_IMAGE_TAGS_COUNT-1 : SINGLESTORE_IMAGE_TAG_INDEX))
SINGLESTORE_IMAGE_TAG=${SINGLESTORE_IMAGE_TAGS[SINGLESTORE_IMAGE_TAG_INDEX]}

if [ "$TEST_NUM" == '24' ]
SPARK_VERSION_INDEX=$(( $TEST_NUM % $SPARK_VERSIONS_COUNT))
SPARK_VERSION=${SPARK_VERSIONS[SPARK_VERSION_INDEX]}

if [ $TEST_NUM == $(($SINGLESTORE_IMAGE_TAGS_COUNT*$SPARK_VERSIONS_COUNT)) ]
then
echo 'export FORCE_READ_FROM_LEAVES=TRUE'
else
echo 'export FORCE_READ_FROM_LEAVES=FALSE'
fi

if [ "$TEST_NUM" == '0' ] || [ "$TEST_NUM" == '6' ] || [ "$TEST_NUM" == '12' ] || [ "$TEST_NUM" == '18' ]
then
echo 'export SPARK_VERSION="3.0.3"'
echo 'export TEST_FILTER="testOnly -- -l ExcludeFromSpark30"'
elif [ "$TEST_NUM" == '1' ] || [ "$TEST_NUM" == '7' ] || [ "$TEST_NUM" == '13' ] || [ "$TEST_NUM" == '19' ]
then
echo 'export SPARK_VERSION="3.1.3"'
echo 'export TEST_FILTER="testOnly -- -l ExcludeFromSpark31"'
elif [ "$TEST_NUM" == '2' ] || [ "$TEST_NUM" == '8' ] || [ "$TEST_NUM" == '14' ] || [ "$TEST_NUM" == '20' ]
then
echo 'export SPARK_VERSION="3.2.4"'
echo 'export TEST_FILTER="testOnly -- -l ExcludeFromSpark32"'
elif [ "$TEST_NUM" == '3' ] || [ "$TEST_NUM" == '9' ] || [ "$TEST_NUM" == '15' ] || [ "$TEST_NUM" == '21' ]
then
echo 'export SPARK_VERSION="3.3.3"'
echo 'export TEST_FILTER="testOnly -- -l ExcludeFromSpark33"'
elif [ "$TEST_NUM" == '4' ] || [ "$TEST_NUM" == '10' ] || [ "$TEST_NUM" == '16' ] || [ "$TEST_NUM" == '22' ]
then
echo 'export SPARK_VERSION="3.4.1"'
echo 'export TEST_FILTER="testOnly -- -l ExcludeFromSpark34"'
else
echo 'export SPARK_VERSION="3.5.0"'
echo 'export TEST_FILTER="testOnly -- -l ExcludeFromSpark35"'
fi


echo 'export SCALA_VERSION="2.12.12"'
echo "export SINGLESTORE_IMAGE='singlestore/cluster-in-a-box:$SINGLESTORE_IMAGE_TAG'"
echo "export SPARK_VERSION='$SPARK_VERSION'"
echo "export TEST_FILTER='testOnly -- -l ExcludeFromSpark${SPARK_VERSION:0:1}${SPARK_VERSION:2:1}'"
echo "export SCALA_VERSION='2.12.12'"
10 changes: 6 additions & 4 deletions scripts/setup-cluster.sh
Expand Up @@ -67,9 +67,11 @@ echo "Restarting cluster"
docker exec -it ${CONTAINER_NAME} memsqlctl restart-node --yes --all
singlestore-wait-start
echo "Setting up root-ssl user"
mysql -u root -h 127.0.0.1 -P 5506 -p"${SINGLESTORE_PASSWORD}" -e 'grant all privileges on *.* to "root-ssl"@"%" require ssl with grant option'
mysql -u root -h 127.0.0.1 -P 5507 -p"${SINGLESTORE_PASSWORD}" -e 'grant all privileges on *.* to "root-ssl"@"%" require ssl with grant option'
mysql -u root -h 127.0.0.1 -P 5508 -p"${SINGLESTORE_PASSWORD}" -e 'grant all privileges on *.* to "root-ssl"@"%" require ssl with grant option'
mysql -u root -h 127.0.0.1 -P 5506 -p"${SINGLESTORE_PASSWORD}" -e 'create user "root-ssl"@"%" require ssl'
mysql -u root -h 127.0.0.1 -P 5506 -p"${SINGLESTORE_PASSWORD}" -e 'grant all privileges on *.* to "root-ssl" with grant option'
mysql -u root -h 127.0.0.1 -P 5507 -p"${SINGLESTORE_PASSWORD}" -e 'create user "root-ssl"@"%" require ssl'
mysql -u root -h 127.0.0.1 -P 5507 -p"${SINGLESTORE_PASSWORD}" -e 'grant all privileges on *.* to "root-ssl" with grant option'
mysql -u root -h 127.0.0.1 -P 5508 -p"${SINGLESTORE_PASSWORD}" -e 'grant all privileges on *.* to "root-ssl" with grant option'
echo "Done!"
echo "Setting up root-jwt user"
mysql -h 127.0.0.1 -u root -P 5506 -p"${SINGLESTORE_PASSWORD}" -e "CREATE USER 'test_jwt_user' IDENTIFIED WITH authentication_jwt"
Expand All @@ -93,4 +95,4 @@ if [[ ${CONTAINER_IP} != "${CURRENT_AGG_IP}" ]]; then
# add aggregator with correct ip
mysql -u root -h 127.0.0.1 -P 5506 -p"${SINGLESTORE_PASSWORD}" --batch -N -e "add aggregator root:'${SINGLESTORE_PASSWORD}'@'${CONTAINER_IP}':3308"
fi
echo "Done!"
echo "Done!"

This file was deleted.

This file was deleted.

0 comments on commit c37e884

Please sign in to comment.