From 980c2c1f3624c3c4f10623f9e35c5b481ab9364a Mon Sep 17 00:00:00 2001
From: shuke <37901441+shuke987@users.noreply.github.com>
Date: Sat, 11 May 2024 21:50:16 +0800
Subject: [PATCH 1/5] 1

---
 .../doris/regression/suite/Suite.groovy       | 64 +++++++++++++++++++
 .../test_trino_different_parquet_types.groovy | 52 +++------------
 .../hive/test_trino_hive_orc.groovy           | 50 +++------------
 .../hive/test_trino_hive_other.groovy         | 51 +++------------
 .../hive/test_trino_hive_parquet.groovy       | 52 +++------------
 .../test_trino_hive_schema_evolution.groovy   | 52 +++------------
 .../hive/test_trino_hive_serde_prop.groovy    | 52 +++------------
 .../test_trino_hive_tablesample_p0.groovy     | 52 +++------------
 .../hive/test_trino_hive_tpch_sf1_orc.groovy  | 51 +++------------
 .../test_trino_hive_tpch_sf1_parquet.groovy   | 51 +++------------
 ...est_trino_prepare_hive_data_in_case.groovy | 52 +++------------
 .../test_plugins_download.groovy              | 50 +++------------
 12 files changed, 152 insertions(+), 477 deletions(-)

diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index d5eb22756efb4a..b96fa7700b5d50 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -89,6 +89,7 @@ class Suite implements GroovyInterceptable {
     final List finishCallbacks = new Vector<>()
     final List lazyCheckExceptions = new Vector<>()
     final List lazyCheckFutures = new Vector<>()
+    static Boolean isTrinoConnectorDownloaded = false
 
     Suite(String name, String group, SuiteContext context, SuiteCluster cluster) {
         this.name = name
@@ -766,6 +767,69 @@ class Suite implements GroovyInterceptable {
         Assert.assertEquals(0, code)
     }
 
+    /*
+     * Downloads the trino connectors and sends them to every FE and BE.
+     * There are 3 configs to support this: trino_connectors in regression-conf.groovy, and trino_connector_plugin_dir in BE and FE.
+     * The FE and BE configs must be consistent with the config in regression-conf.groovy.
+     * e.g. if regression-conf.groovy sets trino_connectors = "/tmp/trino_connector", then be.conf and fe.conf must set trino_connector_plugin_dir="/tmp/trino_connector/connectors".
+     *
+     * This function is not reentrant; it is synchronized and downloads only once.
+     *
+     * If it fails, assertTrue(false) is called.
+     */
+    static synchronized void dispatchTrinoConnectors(host_ips) {
+        if (isTrinoConnectorDownloaded == true) {
+            logger.info("trino connector downloaded")
+            return
+        }
+
+        def dir_download = context.config.otherConfigs.get("trinoPluginsPath")
+        Assert.assertTrue(!dir_download.isEmpty())
+        def path_tar = "${dir_download}/trino-connectors.tar.gz"
+        // extract to a tmp directory, and then scp to every host in host_ips, including self.
+ def dir_connector_tmp = "${dir_download}/connectors_tmp" + def path_connector_tmp = "${dir_connector_tmp}/connectors" + def path_connector = "${dir_download}/connectors" + def s3_url = getS3Url() + + def cmds = [] as List + cmds.add("mkdir -p ${dir_download}") + cmds.add("rm -rf ${path_tar}") + cmds.add("rm -rf ${dir_connector_tmp}") + cmds.add("mkdir -p ${dir_connector_tmp}") + cmds.add("/usr/bin/curl --max-time 600 ${s3_url}/regression/trino-connectors.tar.gz --output ${path_tar}") + cmds.add("tar -zxvf ${path_tar} -C ${dir_connector_tmp}") + + def executeCommand = { String cmd, Boolean mustSuc -> + try { + logger.info("execute ${cmd}") + def proc = cmd.execute() + // if timeout, exception will be thrown + proc.waitForOrKill(900 * 1000) + logger.info("execute result ${proc.getText()}.") + if (mustSuc == true) { + Assert.assertEquals(0, proc.exitValue()) + } + } catch (IOException e) { + Assert.assertTrue(false, "execute timeout") + } + } + + for (def cmd in cmds) { + executeCommand(cmd, true) + } + + host_ips = host_ips.unique() + for (def ip in host_ips) { + logger.info("scp to ${ip}") + executeCommand("ssh -o StrictHostKeyChecking=no root@${ip} \"rm -rf ${path_connector}\"", false) + scpFiles("root", ip, path_connector_tmp, path_connector, false) // if failed, assertTrue(false) is executed. + } + + isTrinoConnectorDownloaded = true + logger.info("dispatch trino connector to ${dir_download} succeed") + } + void mkdirRemote(String username, String host, String path) { String cmd = "ssh ${username}@${host} 'mkdir -p ${path}'" logger.info("Execute: ${cmd}".toString()) diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy index 44dd015cfc34c6..63dca63c422a52 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_different_parquet_types.groovy @@ -16,52 +16,16 @@ // under the License. 
suite("test_trino_different_parquet_types", "p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - - + dispatchTrinoConnectors(host_ips.unique()) String hms_port = context.config.otherConfigs.get("hive2HmsPort") String hdfs_port = context.config.otherConfigs.get("hive2HdfsPort") diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy index 22b7724d9545ae..602169b7975b35 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_orc.groovy @@ -16,50 +16,16 @@ // under the License. 
suite("test_trino_hive_orc", "all_types,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } + dispatchTrinoConnectors(host_ips.unique()) // Ensure that all types are parsed correctly def select_top50 = { diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy index 0c66fbbc29bfec..427951d0599636 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_other.groovy @@ -16,51 +16,16 @@ // under the License. 
suite("test_trino_hive_other", "external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - + dispatchTrinoConnectors(host_ips.unique()) def q01 = { qt_q24 """ select name, count(1) as c from student group by name order by name desc;""" diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy index 4f37ebc52bc634..a060311e903031 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_parquet.groovy @@ -16,52 +16,16 @@ // under the License. 
suite("test_trino_hive_parquet", "p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) def q01 = { qt_q01 """ diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy index 7fe152ccff6b76..a3662eb4b0c0dd 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_schema_evolution.groovy @@ -16,52 +16,16 @@ // under the License. 
suite("test_trino_hive_schema_evolution", "p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) def q_text = { qt_q01 """ diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy index d3437417f22374..c2caf784e45f0c 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_serde_prop.groovy @@ -16,52 +16,16 @@ // under the License. 
suite("test_trino_hive_serde_prop", "external_docker,hive,external_docker_hive,p0,external") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) String enabled = context.config.otherConfigs.get("enableHiveTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy index 1a834b35e02836..83c358e05fc5a8 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tablesample_p0.groovy @@ -16,52 +16,16 @@ // under the License. 
suite("test_trino_hive_tablesample_p0", "all_types,p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) String enabled = context.config.otherConfigs.get("enableHiveTest") if (enabled != null && enabled.equalsIgnoreCase("true")) { diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy index d8de7faefbc1cc..76769332f316f9 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_orc.groovy @@ -16,51 +16,16 @@ // under the License. 
suite("test_trino_hive_tpch_sf1_orc", "p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) String enable_file_cache = "false" def q01 = { diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy index 0675e28bee2093..e09782771fa8c7 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_hive_tpch_sf1_parquet.groovy @@ -16,51 +16,16 @@ // under the License. 
suite("test_trino_hive_tpch_sf1_parquet", "p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) String enable_file_cache = "false" def q01 = { diff --git a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy index 037ad855cf2e67..b465c2373d2800 100644 --- a/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/hive/test_trino_prepare_hive_data_in_case.groovy @@ -16,52 +16,16 @@ // under the License. 
suite("test_trino_prepare_hive_data_in_case", "p0,external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } - - + dispatchTrinoConnectors(host_ips.unique()) String enabled = context.config.otherConfigs.get("enableHiveTest") def catalog_name = "test_trino_prepare_hive_data_in_case" diff --git a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy index acf9f4558df18f..3d28612cf62ffd 100644 --- a/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy +++ b/regression-test/suites/external_table_p0/trino_connector/test_plugins_download.groovy @@ -16,48 +16,14 @@ // under the License. 
suite("test_plugins_download", "external,hive,external_docker,external_docker_hive") { - def trino_connector_download_dir = context.config.otherConfigs.get("trinoPluginsPath") - - // mkdir trino_connector_download_dir - logger.info("start create dir ${trino_connector_download_dir} ...") - def mkdir_connectors_tar = "mkdir -p ${trino_connector_download_dir}".execute().getText() - logger.info("finish create dir, result: ${mkdir_connectors_tar} ...") - - - def plugins_compression = "${trino_connector_download_dir}/trino-connectors.tar.gz" - def plugins_dir = "${trino_connector_download_dir}/connectors" - - // download trino-connectors.tar.gz - File path = new File("${plugins_compression}") - if (path.exists() && path.isFile()) { - logger.info("${plugins_compression} has been downloaded") - } else { - logger.info("start delete trino-connector plugins dir ...") - def delete_local_connectors_tar = "rm -r ${plugins_dir}".execute() - logger.info("start download trino-connector plugins ...") - def s3_url = getS3Url() - - logger.info("getS3Url ==== ${s3_url}") - def download_connectors_tar = "/usr/bin/curl ${s3_url}/regression/trino-connectors.tar.gz --output ${plugins_compression}" - logger.info("download cmd : ${download_connectors_tar}") - def run_download_connectors_cmd = download_connectors_tar.execute().getText() - logger.info("result: ${run_download_connectors_cmd}") - logger.info("finish download ${plugins_compression} ...") + def host_ips = new ArrayList() + String[][] backends = sql """ show backends """ + for (def b in backends) { + host_ips.add(b[1]) } - - // decompression trino-plugins.tar.gz - File dir = new File("${plugins_dir}") - if (dir.exists() && dir.isDirectory()) { - logger.info("${plugins_dir} dir has been decompressed") - } else { - if (path.exists() && path.isFile()) { - def run_cmd = "tar -zxvf ${plugins_compression} -C ${trino_connector_download_dir}" - logger.info("run_cmd : $run_cmd") - def run_decompress_cmd = run_cmd.execute().getText() - logger.info("result: $run_decompress_cmd") - } else { - logger.info("${plugins_compression} is not exist or is not a file.") - throw exception - } + String [][] frontends = sql """ show frontends """ + for (def f in frontends) { + host_ips.add(f[1]) } + dispatchTrinoConnectors(host_ips.unique()) } \ No newline at end of file From 1a14a9f1e74fe9204691aed2dba2fc1f154de4e6 Mon Sep 17 00:00:00 2001 From: shuke <37901441+shuke987@users.noreply.github.com> Date: Sat, 11 May 2024 21:56:56 +0800 Subject: [PATCH 2/5] 2 --- .../main/groovy/org/apache/doris/regression/suite/Suite.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy index b96fa7700b5d50..d42262d59aa463 100644 --- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy +++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy @@ -777,7 +777,7 @@ class Suite implements GroovyInterceptable { * * If failed, will call assertTrue(false). 
*/ - static synchronized void dispatchTrinoConnectors(host_ips) { + static synchronized void dispatchTrinoConnectors(ArrayList host_ips) { if (isTrinoConnectorDownloaded == true) { logger.info("trino connector downloaded") return From 334c942e588aa1bad8cbacd70698405c86cb1a8e Mon Sep 17 00:00:00 2001 From: shuke <37901441+shuke987@users.noreply.github.com> Date: Sun, 12 May 2024 00:02:50 +0800 Subject: [PATCH 3/5] 2 --- .../apache/doris/regression/suite/Suite.groovy | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy index d42262d59aa463..56620d6dc99dee 100644 --- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy +++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy @@ -80,6 +80,7 @@ class Suite implements GroovyInterceptable { final String name final String group final Logger logger = LoggerFactory.getLogger(this.class) + private static final Logger staticLogger = LoggerFactory.getLogger(Suite.class); // set this in suite to determine which hive docker to use String hivePrefix = "hive2" @@ -756,12 +757,12 @@ class Suite implements GroovyInterceptable { return s3Url } - void scpFiles(String username, String host, String files, String filePath, boolean fromDst=true) { + static void scpFiles(String username, String host, String files, String filePath, boolean fromDst=true) { String cmd = "scp -o StrictHostKeyChecking=no -r ${username}@${host}:${files} ${filePath}" if (!fromDst) { cmd = "scp -o StrictHostKeyChecking=no -r ${files} ${username}@${host}:${filePath}" } - logger.info("Execute: ${cmd}".toString()) + staticLogger.info("Execute: ${cmd}".toString()) Process process = cmd.execute() def code = process.waitFor() Assert.assertEquals(0, code) @@ -779,7 +780,7 @@ class Suite implements GroovyInterceptable { */ static synchronized void dispatchTrinoConnectors(ArrayList host_ips) { if (isTrinoConnectorDownloaded == true) { - logger.info("trino connector downloaded") + staticLogger.info("trino connector downloaded") return } @@ -802,11 +803,11 @@ class Suite implements GroovyInterceptable { def executeCommand = { String cmd, Boolean mustSuc -> try { - logger.info("execute ${cmd}") + staticLogger.info("execute ${cmd}") def proc = cmd.execute() // if timeout, exception will be thrown proc.waitForOrKill(900 * 1000) - logger.info("execute result ${proc.getText()}.") + staticLogger.info("execute result ${proc.getText()}.") if (mustSuc == true) { Assert.assertEquals(0, proc.exitValue()) } @@ -821,13 +822,13 @@ class Suite implements GroovyInterceptable { host_ips = host_ips.unique() for (def ip in host_ips) { - logger.info("scp to ${ip}") + staticLogger.info("scp to ${ip}") executeCommand("ssh -o StrictHostKeyChecking=no root@${ip} \"rm -rf ${path_connector}\"", false) scpFiles("root", ip, path_connector_tmp, path_connector, false) // if failed, assertTrue(false) is executed. 
         }
 
         isTrinoConnectorDownloaded = true
-        logger.info("dispatch trino connector to ${dir_download} succeed")
+        staticLogger.info("dispatch trino connector to ${dir_download} succeed")
     }
 
     void mkdirRemote(String username, String host, String path) {

From b50a7134880162c61d3721734558fc7c0b0ad3cd Mon Sep 17 00:00:00 2001
From: shuke <37901441+shuke987@users.noreply.github.com>
Date: Sun, 12 May 2024 01:02:35 +0800
Subject: [PATCH 4/5] 3

---
 .../org/apache/doris/regression/suite/Suite.groovy | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 56620d6dc99dee..0e24643f87b529 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -80,7 +80,7 @@ class Suite implements GroovyInterceptable {
     final String name
     final String group
     final Logger logger = LoggerFactory.getLogger(this.class)
-    private static final Logger staticLogger = LoggerFactory.getLogger(Suite.class);
+    static final Logger staticLogger = LoggerFactory.getLogger(Suite.class)
 
     // set this in suite to determine which hive docker to use
     String hivePrefix = "hive2"
@@ -768,6 +768,12 @@ class Suite implements GroovyInterceptable {
         Assert.assertEquals(0, code)
     }
 
+    void dispatchTrinoConnectors(ArrayList host_ips)
+    {
+        def dir_download = context.config.otherConfigs.get("trinoPluginsPath")
+        dispatchTrinoConnectors_impl(host_ips, dir_download)
+    }
+
     /*
      * Downloads the trino connectors and sends them to every FE and BE.
      * There are 3 configs to support this: trino_connectors in regression-conf.groovy, and trino_connector_plugin_dir in BE and FE.
      * The FE and BE configs must be consistent with the config in regression-conf.groovy.
     * e.g. if regression-conf.groovy sets trino_connectors = "/tmp/trino_connector", then be.conf and fe.conf must set trino_connector_plugin_dir="/tmp/trino_connector/connectors".
     *
     * This function is not reentrant; it is synchronized and downloads only once.
     *
     * If it fails, assertTrue(false) is called.
     */
-    static synchronized void dispatchTrinoConnectors(ArrayList host_ips) {
+    static synchronized void dispatchTrinoConnectors_impl(ArrayList host_ips, String dir_download) {
         if (isTrinoConnectorDownloaded == true) {
             staticLogger.info("trino connector downloaded")
             return
         }
 
-        def dir_download = context.config.otherConfigs.get("trinoPluginsPath")
         Assert.assertTrue(!dir_download.isEmpty())
         def path_tar = "${dir_download}/trino-connectors.tar.gz"
         // extract to a tmp directory, and then scp to every host in host_ips, including self.
From 085850b4832be4dfd85bb5f2a37fa5c00de8d720 Mon Sep 17 00:00:00 2001
From: shuke <37901441+shuke987@users.noreply.github.com>
Date: Sun, 12 May 2024 08:19:11 +0800
Subject: [PATCH 5/5] 4

---
 .../org/apache/doris/regression/suite/Suite.groovy | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
index 0e24643f87b529..b42f21d297f126 100644
--- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
+++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy
@@ -771,7 +771,9 @@ class Suite implements GroovyInterceptable {
     void dispatchTrinoConnectors(ArrayList host_ips)
     {
         def dir_download = context.config.otherConfigs.get("trinoPluginsPath")
-        dispatchTrinoConnectors_impl(host_ips, dir_download)
+        def s3_url = getS3Url()
+        def url = "${s3_url}/regression/trino-connectors.tar.gz"
+        dispatchTrinoConnectors_impl(host_ips, dir_download, url)
     }
 
     /*
@@ -784,7 +786,7 @@ class Suite implements GroovyInterceptable {
     *
     * If it fails, assertTrue(false) is called.
     */
-    static synchronized void dispatchTrinoConnectors_impl(ArrayList host_ips, String dir_download) {
+    static synchronized void dispatchTrinoConnectors_impl(ArrayList host_ips, String dir_download, String url) {
         if (isTrinoConnectorDownloaded == true) {
             staticLogger.info("trino connector downloaded")
             return
         }
 
         Assert.assertTrue(!dir_download.isEmpty())
         def path_tar = "${dir_download}/trino-connectors.tar.gz"
         // extract to a tmp directory, and then scp to every host in host_ips, including self.
         def dir_connector_tmp = "${dir_download}/connectors_tmp"
         def path_connector_tmp = "${dir_connector_tmp}/connectors"
         def path_connector = "${dir_download}/connectors"
-        def s3_url = getS3Url()
 
         def cmds = [] as List
         cmds.add("mkdir -p ${dir_download}")
         cmds.add("rm -rf ${path_tar}")
         cmds.add("rm -rf ${dir_connector_tmp}")
         cmds.add("mkdir -p ${dir_connector_tmp}")
-        cmds.add("/usr/bin/curl --max-time 600 ${s3_url}/regression/trino-connectors.tar.gz --output ${path_tar}")
+        cmds.add("/usr/bin/curl --max-time 600 ${url} --output ${path_tar}")
         cmds.add("tar -zxvf ${path_tar} -C ${dir_connector_tmp}")
 
         def executeCommand = { String cmd, Boolean mustSuc ->
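Configuration sketch for reference (assumed values: the key names follow the comment in Suite.groovy above, and "/tmp/trino_connector" is only the example path used there; verify both against the actual regression-conf.groovy of the deployment):

    // regression-conf.groovy -- dispatchTrinoConnectors() reads this directory
    // via context.config.otherConfigs.get("trinoPluginsPath")
    trino_connectors = "/tmp/trino_connector"

    // fe.conf and be.conf must point at the "connectors" subdirectory that the
    // tarball is extracted into and scp'd to on every FE/BE host:
    // trino_connector_plugin_dir=/tmp/trino_connector/connectors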