From 21e683155352f34ffd73e04b082c55a2f15bebbb Mon Sep 17 00:00:00 2001 From: Bereng Date: Thu, 25 May 2023 08:32:02 +0200 Subject: [PATCH 1/2] Add -Dno-checkstyle to circle repeats --- .circleci/config.yml | 44 +++++++++++++++++------------------ .circleci/config.yml.FREE | 44 +++++++++++++++++------------------ .circleci/config.yml.PAID | 44 +++++++++++++++++------------------ .circleci/config_template.yml | 4 ++-- 4 files changed, 68 insertions(+), 68 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b009cd1f3cd4..5859300ad9e3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -172,7 +172,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -876,7 +876,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # 
maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -1198,7 +1198,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ 
${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ 
-1630,7 +1630,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp 
~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -1792,7 +1792,7 @@ jobs: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true 
-Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi @@ -2448,7 +2448,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport 
PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -3970,7 +3970,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set 
-x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel 
runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4465,7 +4465,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} 
/ CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo 
${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4714,7 +4714,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} 
| sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is 
optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4805,7 +4805,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is 
optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in 
build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5237,7 +5237,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test 
macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n 
testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5439,7 +5439,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n 
testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor 
test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5775,7 +5775,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as 
requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n 
method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6306,7 +6306,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n 
class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" 
|| \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6508,7 +6508,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n 
$target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == 
\"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6711,7 +6711,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == 
\"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ 
$method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7127,7 +7127,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" 
]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( 
set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7565,7 +7565,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o 
pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout 
output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7835,7 +7835,7 @@ jobs: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi @@ -8733,7 +8733,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running 
test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n 
status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -9071,7 +9071,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout 
output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -9296,7 +9296,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: diff --git a/.circleci/config.yml.FREE b/.circleci/config.yml.FREE index b009cd1f3cd4..5859300ad9e3 100644 --- a/.circleci/config.yml.FREE +++ b/.circleci/config.yml.FREE @@ -172,7 +172,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -876,7 +876,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # 
maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -1198,7 +1198,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ 
${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ 
-1630,7 +1630,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp 
~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -1792,7 +1792,7 @@ jobs: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true 
-Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi @@ -2448,7 +2448,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport 
PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -3970,7 +3970,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set 
-x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel 
runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4465,7 +4465,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} 
/ CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo 
${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4714,7 +4714,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} 
| sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is 
optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4805,7 +4805,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is 
optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in 
build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5237,7 +5237,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test 
macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n 
testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5439,7 +5439,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n 
testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor 
test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5775,7 +5775,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as 
requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n 
method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6306,7 +6306,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n 
class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" 
|| \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6508,7 +6508,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n 
$target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == 
\"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6711,7 +6711,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == 
\"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ 
$method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7127,7 +7127,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" 
]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( 
set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7565,7 +7565,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o 
pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout 
output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7835,7 +7835,7 @@ jobs: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi @@ -8733,7 +8733,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running 
test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n 
status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -9071,7 +9071,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout 
output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -9296,7 +9296,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: diff --git a/.circleci/config.yml.PAID b/.circleci/config.yml.PAID index 3118f379e214..3f2c18e71946 100644 --- a/.circleci/config.yml.PAID +++ b/.circleci/config.yml.PAID @@ -172,7 +172,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -876,7 +876,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # 
maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -1198,7 +1198,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ 
${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ 
-1630,7 +1630,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp 
~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -1792,7 +1792,7 @@ jobs: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true 
-Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi @@ -2448,7 +2448,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport 
PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -3970,7 +3970,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set 
-x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel 
runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4465,7 +4465,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} 
/ CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo 
${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4714,7 +4714,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} 
| sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is 
optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -4805,7 +4805,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is 
optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in 
build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5237,7 +5237,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test 
macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n 
testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5439,7 +5439,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n 
testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor 
test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -5775,7 +5775,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as 
requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n 
method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6306,7 +6306,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n 
class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" 
|| \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6508,7 +6508,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n 
$target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == 
\"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -6711,7 +6711,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == 
\"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ 
$method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7127,7 +7127,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" 
]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( 
set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7565,7 +7565,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o 
pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout 
output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -7835,7 +7835,7 @@ jobs: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi @@ -8733,7 +8733,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running 
test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n 
status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -9071,7 +9071,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout 
output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: @@ -9296,7 +9296,7 @@ jobs: - run: name: Repeatedly run new or modifed JUnit tests no_output_timeout: 15m - command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n 
dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" + command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s///\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n 
source=build/test/logs/${testtag}\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n" - store_test_results: path: /tmp/results/repeated_utests/output - store_artifacts: diff --git a/.circleci/config_template.yml b/.circleci/config_template.yml index 4367bdcd23d5..27ad35c43a74 100644 --- a/.circleci/config_template.yml +++ b/.circleci/config_template.yml @@ -3215,7 +3215,7 @@ commands: # run the test status="passes" if !( set -o pipefail && \ - ant <> $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \ + ant <> $name_arg $methods_arg $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | \ tee stdout.txt \ ); then status="fails" @@ -3350,7 +3350,7 @@ commands: # run the test status="passes" - if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true | tee stdout.txt ); then + if !( set -o pipefail && ant $target $name $methods $vnodes_args -Dno-build-test=true -Dno-checkstyle=true | tee stdout.txt ); then status="fails" exit_code=1 fi From 1079bd258284f404254b497750c51e03e06de7a1 Mon Sep 17 00:00:00 2001 From: Bereng Date: Thu, 25 May 2023 08:35:12 +0200 Subject: [PATCH 2/2] CIRCLE REPEAT TEST DO NOT MERGE --- .circleci/config.yml | 472 +++++++++++++++++++++++++++---------------- 1 file changed, 294 insertions(+), 178 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5859300ad9e3..5d500eec8d6c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -110,8 +110,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -201,8 +201,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -267,8 +267,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -378,8 +378,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -534,8 +534,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - 
REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -621,8 +621,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -740,8 +740,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -814,8 +814,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -905,8 +905,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1016,8 +1016,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1136,8 +1136,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1227,8 +1227,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1346,8 +1346,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1457,8 +1457,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1567,8 +1567,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 
256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1659,8 +1659,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1855,8 +1855,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -1942,8 +1942,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2077,8 +2077,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2188,8 +2188,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2298,8 +2298,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2385,8 +2385,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2477,8 +2477,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2588,8 +2588,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: 
org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2698,8 +2698,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2764,8 +2764,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2883,8 +2883,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -2957,8 +2957,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3077,8 +3077,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3211,8 +3211,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3345,8 +3345,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3480,8 +3480,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3599,8 +3599,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - 
REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3715,8 +3715,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3834,8 +3834,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3907,8 +3907,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -3999,8 +3999,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4085,8 +4085,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4158,8 +4158,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4269,8 +4269,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4403,8 +4403,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4494,8 +4494,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4651,8 +4651,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - 
CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4743,8 +4743,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4834,8 +4834,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -4953,8 +4953,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5064,8 +5064,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5174,8 +5174,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5266,8 +5266,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5376,8 +5376,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5468,8 +5468,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5602,8 +5602,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - 
REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5713,8 +5713,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5804,8 +5804,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -5938,8 +5938,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6048,8 +6048,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6135,8 +6135,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6243,8 +6243,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6335,8 +6335,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6445,8 +6445,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6537,8 +6537,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - 
REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6648,8 +6648,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6740,8 +6740,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6826,8 +6826,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -6945,8 +6945,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7064,8 +7064,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7156,8 +7156,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7265,8 +7265,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7352,8 +7352,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7438,8 +7438,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7502,8 +7502,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - 
CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7594,8 +7594,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7702,8 +7702,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7898,8 +7898,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -7971,8 +7971,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8090,8 +8090,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8200,8 +8200,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8334,8 +8334,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8453,8 +8453,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8561,8 +8561,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - 
REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8671,8 +8671,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8762,8 +8762,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8836,8 +8836,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -8900,8 +8900,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -9008,8 +9008,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -9100,8 +9100,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -9234,8 +9234,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -9325,8 +9325,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -9431,8 +9431,8 @@ jobs: - CCM_MAX_HEAP_SIZE: 1024M - CCM_HEAP_NEWSIZE: 256M - REPEATED_TESTS_STOP_ON_FAILURE: false - - REPEATED_UTESTS: null - - REPEATED_UTESTS_COUNT: 500 + - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest + - REPEATED_UTESTS_COUNT: 10 - REPEATED_UTESTS_FQLTOOL: null - 
REPEATED_UTESTS_FQLTOOL_COUNT: 500 - REPEATED_UTESTS_LONG: null @@ -9473,6 +9473,12 @@ workflows: requires: - start_j8_unit_tests - j8_build + - start_j8_unit_tests_repeat: + type: approval + - j8_unit_tests_repeat: + requires: + - start_j8_unit_tests_repeat + - j8_build - start_j8_jvm_dtests: type: approval - j8_jvm_dtests: @@ -9533,6 +9539,12 @@ workflows: requires: - start_j11_unit_tests - j8_build + - start_j11_unit_tests_repeat: + type: approval + - j11_unit_tests_repeat: + requires: + - start_j11_unit_tests_repeat + - j8_build - start_j8_utests_long: type: approval - j8_utests_long: @@ -9557,6 +9569,18 @@ workflows: requires: - start_j11_utests_cdc - j8_build + - start_j8_utests_cdc_repeat: + type: approval + - j8_utests_cdc_repeat: + requires: + - start_j8_utests_cdc_repeat + - j8_build + - start_j11_utests_cdc_repeat: + type: approval + - j11_utests_cdc_repeat: + requires: + - start_j11_utests_cdc_repeat + - j8_build - start_j8_utests_compression: type: approval - j8_utests_compression: @@ -9569,6 +9593,18 @@ workflows: requires: - start_j11_utests_compression - j8_build + - start_j8_utests_compression_repeat: + type: approval + - j8_utests_compression_repeat: + requires: + - start_j8_utests_compression_repeat + - j8_build + - start_j11_utests_compression_repeat: + type: approval + - j11_utests_compression_repeat: + requires: + - start_j11_utests_compression_repeat + - j8_build - start_j8_utests_stress: type: approval - j8_utests_stress: @@ -9605,6 +9641,18 @@ workflows: requires: - start_j11_utests_system_keyspace_directory - j8_build + - start_j8_utests_system_keyspace_directory_repeat: + type: approval + - j8_utests_system_keyspace_directory_repeat: + requires: + - start_j8_utests_system_keyspace_directory_repeat + - j8_build + - start_j11_utests_system_keyspace_directory_repeat: + type: approval + - j11_utests_system_keyspace_directory_repeat: + requires: + - start_j11_utests_system_keyspace_directory_repeat + - j8_build - start_j8_dtest_jars_build: type: approval - j8_dtest_jars_build: @@ -9773,6 +9821,9 @@ workflows: - j8_unit_tests: requires: - j8_build + - j8_unit_tests_repeat: + requires: + - j8_build - j8_simulator_dtests: requires: - j8_build @@ -9803,6 +9854,9 @@ workflows: - j11_unit_tests: requires: - j8_build + - j11_unit_tests_repeat: + requires: + - j8_build - start_utests_long: type: approval - j8_utests_long: @@ -9823,6 +9877,14 @@ workflows: requires: - start_utests_cdc - j8_build + - j8_utests_cdc_repeat: + requires: + - start_utests_cdc + - j8_build + - j11_utests_cdc_repeat: + requires: + - start_utests_cdc + - j8_build - start_utests_compression: type: approval - j8_utests_compression: @@ -9833,6 +9895,14 @@ workflows: requires: - start_utests_compression - j8_build + - j8_utests_compression_repeat: + requires: + - start_utests_compression + - j8_build + - j11_utests_compression_repeat: + requires: + - start_utests_compression + - j8_build - start_utests_stress: type: approval - j8_utests_stress: @@ -9862,6 +9932,13 @@ workflows: requires: - start_utests_system_keyspace_directory - j8_build + - j8_utests_system_keyspace_directory_repeat: + requires: + - j8_build + - j11_utests_system_keyspace_directory_repeat: + requires: + - start_utests_system_keyspace_directory + - j8_build - start_jvm_upgrade_dtests: type: approval - j8_dtest_jars_build: @@ -9998,6 +10075,12 @@ workflows: requires: - start_j11_unit_tests - j11_build + - start_j11_unit_tests_repeat: + type: approval + - j11_unit_tests_repeat: + requires: + - start_j11_unit_tests_repeat + - j11_build - 
start_j11_jvm_dtests: type: approval - j11_jvm_dtests: @@ -10104,12 +10187,24 @@ workflows: requires: - start_j11_utests_cdc - j11_build + - start_j11_utests_cdc_repeat: + type: approval + - j11_utests_cdc_repeat: + requires: + - start_j11_utests_cdc_repeat + - j11_build - start_j11_utests_compression: type: approval - j11_utests_compression: requires: - start_j11_utests_compression - j11_build + - start_j11_utests_compression_repeat: + type: approval + - j11_utests_compression_repeat: + requires: + - start_j11_utests_compression_repeat + - j11_build - start_j11_utests_stress: type: approval - j11_utests_stress: @@ -10128,6 +10223,12 @@ workflows: requires: - start_j11_utests_system_keyspace_directory - j11_build + - start_j11_utests_system_keyspace_directory_repeat: + type: approval + - j11_utests_system_keyspace_directory_repeat: + requires: + - start_j11_utests_system_keyspace_directory_repeat + - j11_build java11_pre-commit_tests: jobs: - start_pre-commit_tests: @@ -10138,6 +10239,9 @@ workflows: - j11_unit_tests: requires: - j11_build + - j11_unit_tests_repeat: + requires: + - j11_build - j11_jvm_dtests: requires: - j11_build @@ -10216,12 +10320,20 @@ workflows: requires: - start_utests_cdc - j11_build + - j11_utests_cdc_repeat: + requires: + - start_utests_cdc + - j11_build - start_utests_compression: type: approval - j11_utests_compression: requires: - start_utests_compression - j11_build + - j11_utests_compression_repeat: + requires: + - start_utests_compression + - j11_build - start_utests_stress: type: approval - j11_utests_stress: @@ -10240,3 +10352,7 @@ workflows: requires: - start_utests_system_keyspace_directory - j11_build + - j11_utests_system_keyspace_directory_repeat: + requires: + - start_utests_system_keyspace_directory + - j11_build
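
Note on the second ("DO NOT MERGE") commit: the repeat jobs are driven purely by the per-job environment overrides changed throughout the hunks above. Reduced to the fields this commit touches, each affected job's environment block ends up looking roughly like the sketch below (job name and values copied from the hunks; the rest of the job definition is elided):

    j8_unit_tests_repeat:
      # ... other job settings unchanged ...
      environment:
      - REPEATED_TESTS_STOP_ON_FAILURE: false
      - REPEATED_UTESTS: org.apache.cassandra.config.DatabaseDescriptorRefTest
      - REPEATED_UTESTS_COUNT: 10

That is, the test commit swaps the default of 500 repetitions of an empty test list for 10 repetitions of DatabaseDescriptorRefTest, which is enough to exercise the new -Dno-checkstyle path without burning CI time.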
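
The workflow half of the commit wires each new *_repeat job either directly after the build job (in the pre-commit workflows) or behind its own approval gate (in the on-demand workflows). The approval-gated shape, reduced to a single pair and with the enclosing workflow key elided, is a minimal sketch of the entries added above:

    jobs:
    - start_j8_unit_tests_repeat:
        type: approval
    - j8_unit_tests_repeat:
        requires:
        - start_j8_unit_tests_repeat
        - j8_build

The same pattern repeats for the j11 unit tests and for the cdc, compression and system_keyspace_directory repeat variants.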